# --- Example 1 --- (scrape artifact: "Ejemplo n.º 1" header plus score "0",
# commented out so the file can be parsed as Python)
import configs.config_loader as cfg_loader
import os
import trimesh
import numpy as np
from tqdm import tqdm

# Load the experiment configuration (paths, resolutions, sampling settings)
# via the project's config loader.
cfg = cfg_loader.get_config()

# NOTE(review): `torch`, `model`, `voxelized_data` and `Generator` are used
# below but never imported in this snippet — the original file presumably
# imported them at the top; confirm against the full source.
device = torch.device("cuda")  # assumes a CUDA-capable GPU is available
net = model.NDF()

# Test-split dataset of voxelized point clouds; batch_size=1, so generation
# processes one shape at a time.
dataset = voxelized_data.VoxelizedDataset('test',
                                          res=cfg.input_res,
                                          pointcloud_samples=cfg.num_points,
                                          data_path=cfg.data_dir,
                                          split_file=cfg.split_file,
                                          batch_size=1,
                                          num_sample_points=cfg.num_sample_points_generation,
                                          num_workers=30,
                                          sample_distribution=cfg.sample_ratio,
                                          sample_sigmas=cfg.sample_std_dev)

# Generator wraps the network for this experiment on the chosen device.
gen = Generator(net, cfg.exp_name, device=device)

# Evaluation results are written under the experiment's own folder.
out_path = 'experiments/{}/evaluation/'.format(cfg.exp_name)


def gen_iterator(out_path, dataset, gen_p):
    global gen
    gen = gen_p

    if not os.path.exists(out_path):
# --- Example 2 --- (scrape artifact: "Ejemplo n.º 2" header plus score "0";
# the preceding `gen_iterator` body was truncated by the scrape)
    args = parser.parse_known_args()[0]

# Instantiate the network architecture named by the --model argument.
# The original used three independent `if` tests with mutually exclusive
# conditions; a lookup table is equivalent, and `net` is likewise left
# unassigned when the name matches none of the known architectures.
_net_classes = {
    'ShapeNet32Vox': model.ShapeNet32Vox,
    'ShapeNet128Vox': model.ShapeNet128Vox,
    'ShapeNetPoints': model.ShapeNetPoints,
}
if args.model in _net_classes:
    net = _net_classes[args.model]()

def _make_shapenet_split(mode):
    """Build the VoxelizedDataset for one split ('train' or 'val').

    The two splits previously duplicated all eight keyword arguments; a
    single helper keeps them guaranteed-consistent and makes any future
    setting change a one-line edit.
    """
    return voxelized_data.VoxelizedDataset(
        mode,
        voxelized_pointcloud=args.pointcloud,
        pointcloud_samples=args.pc_samples,
        res=args.res,
        sample_distribution=args.sample_distribution,
        sample_sigmas=args.sample_sigmas,
        num_sample_points=50000,
        batch_size=args.batch_size,
        num_workers=30)


train_dataset = _make_shapenet_split('train')
val_dataset = _make_shapenet_split('val')
# --- Example 3 --- (scrape artifact: "Ejemplo n.º 3" header plus score "0")
import models.data.voxelized_data_shapenet as voxelized_data
from models import training
import torch
import configs.config_loader as cfg_loader


# Experiment configuration (input resolution, data paths, sampling settings).
cfg = cfg_loader.get_config()
# NOTE(review): `model` is not imported in this snippet — the original file
# presumably imported the module defining NDF; confirm.
net = model.NDF()


def _build_cfg_dataset(mode):
    """Build the VoxelizedDataset for one split ('train' or 'val') from cfg.

    Both splits previously duplicated every keyword argument (including the
    same num_sample_points_training); constructing both through one helper
    removes the copy-paste and keeps them in sync.
    """
    return voxelized_data.VoxelizedDataset(mode,
                                           res=cfg.input_res,
                                           pointcloud_samples=cfg.num_points,
                                           data_path=cfg.data_dir,
                                           split_file=cfg.split_file,
                                           batch_size=cfg.batch_size,
                                           num_sample_points=cfg.num_sample_points_training,
                                           num_workers=30,
                                           sample_distribution=cfg.sample_ratio,
                                           sample_sigmas=cfg.sample_std_dev)


train_dataset = _build_cfg_dataset('train')
val_dataset = _build_cfg_dataset('val')
# --- Example 4 --- (scrape artifact: "Ejemplo n.º 4" header plus score "0";
# the `if` guarding the next line was truncated by the scrape)
    net = model.ShapeNet32Vox()

# Pick the network architecture matching --model. The original three tests
# were written as separate `if` statements; since the string comparisons are
# mutually exclusive, a chained elif form behaves identically (and, as
# before, leaves `net` untouched for any other name).
if args.model == 'ShapeNet128Vox':
    net = model.ShapeNet128Vox()
elif args.model == 'ShapeNetPoints':
    net = model.ShapeNetPoints()
elif args.model == 'SVR':
    net = model.SVR()

# Dataset for the requested split: single-item batches, in-process loading
# (num_workers=0), and only 100 sample points per shape.
_dataset_kwargs = dict(
    voxelized_pointcloud=args.pointcloud,
    pointcloud_samples=args.pc_samples,
    res=args.res,
    sample_distribution=args.sample_distribution,
    sample_sigmas=args.sample_sigmas,
    num_sample_points=100,
    batch_size=1,
    num_workers=0,
)
dataset = voxelized_data.VoxelizedDataset(args.mode, **_dataset_kwargs)

# Experiment name encodes the input type (point cloud vs. voxels), the
# sampling distribution and sigmas (each value followed by '_'), the
# resolution and the model name.
_input_tag = 'PC' + str(args.pc_samples) if args.pointcloud else 'Voxels'
_dist_tag = ''.join(str(e) + '_' for e in args.sample_distribution)
_sigma_tag = ''.join(str(e) + '_' for e in args.sample_sigmas)
exp_name = 'i{}_dist-{}sigmas-{}v{}_m{}'.format(
    _input_tag, _dist_tag, _sigma_tag, args.res, args.model)

gen = Generator(net,
                0.5,
                exp_name,
                checkpoint=args.checkpoint,
                resolution=args.retrieval_res,
# --- Example 5 --- (scrape artifact: "Ejemplo n.º 5" header plus score "0";
# the preceding `Generator(...)` call was truncated by the scrape)
def loadNDF(index,
            pointcloud_samples,
            exp_name,
            data_dir,
            split_file,
            sample_distribution,
            sample_sigmas,
            res,
            mode='test'):
    """Load an NDF network, pick one example, and cache its encoding.

    Chooses the dataset and hard-coded checkpoint by substring of
    `exp_name` ('garments' or 'cars'), loads example `index` from the
    chosen split, freezes all network parameters, and stores results in the
    module-level globals `encoding`, `net` and `device`.

    NOTE(review): if `exp_name` contains neither 'garments' nor 'cars',
    `dataset` is never assigned and `dataset[index]` below raises a
    NameError — presumably callers only pass those two experiment families.
    """

    global encoding
    global net
    global device

    net = model.NDF()

    device = torch.device("cuda")  # assumes a CUDA-capable GPU is available

    if 'garments' in exp_name.lower():

        # Garment experiments use the garments dataloader.
        dataset = dataloader_garments.VoxelizedDataset(
            mode=mode,
            data_path=data_dir,
            split_file=split_file,
            res=res,
            density=0,
            pointcloud_samples=pointcloud_samples,
            sample_distribution=sample_distribution,
            sample_sigmas=sample_sigmas,
        )

        # Hard-coded pretrained checkpoint identifier for this family.
        checkpoint = 'checkpoint_127h:6m:33s_457593.9149734974'

        generator = generation.Generator(net,
                                         exp_name,
                                         checkpoint=checkpoint,
                                         device=device)

    if 'cars' in exp_name.lower():

        # Car experiments use the ShapeNet voxelized dataset.
        dataset = voxelized_data_shapenet.VoxelizedDataset(
            mode=mode,
            res=res,
            pointcloud_samples=pointcloud_samples,
            data_path=data_dir,
            split_file=split_file,
            sample_distribution=sample_distribution,
            sample_sigmas=sample_sigmas,
            batch_size=1,
            num_sample_points=1024,
            num_workers=1)

        # Hard-coded pretrained checkpoint identifier for this family.
        checkpoint = 'checkpoint_108h:5m:50s_389150.3971107006'

        generator = generation.Generator(net,
                                         exp_name,
                                         checkpoint=checkpoint,
                                         device=device)

    example = dataset[index]

    print('Object: ', example['path'])
    inputs = torch.from_numpy(example['inputs']).unsqueeze(0).to(
        device)  # load inputs; unsqueeze(0) adds a leading batch dimension

    # Inference only: freeze every network parameter.
    for param in net.parameters():
        param.requires_grad = False

    encoding = net.encoder(inputs)  # stored in the module-level global