Example #1
def get_datasets(config):
    """Build the train/val/benchmark datasets for the configured dataset type."""
    if config.dataset == 'indoor':
        info_train = load_obj(config.train_info)
        info_val = load_obj(config.val_info)
        info_benchmark = load_obj(f'configs/indoor/{config.benchmark}.pkl')

        train_set = IndoorDataset(info_train, config, data_augmentation=True)
        val_set = IndoorDataset(info_val, config, data_augmentation=False)
        benchmark_set = IndoorDataset(info_benchmark,
                                      config,
                                      data_augmentation=False)
    elif config.dataset == 'kitti':
        train_set = KITTIDataset(config, 'train', data_augmentation=True)
        val_set = KITTIDataset(config, 'val', data_augmentation=False)
        benchmark_set = KITTIDataset(config, 'test', data_augmentation=False)
    else:
        raise NotImplementedError(f'Unknown dataset: {config.dataset}')

    return train_set, val_set, benchmark_set
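
For context, here is a minimal sketch of how get_datasets might be wired into the dataloader setup shown in the later examples. The YAML config path, the use of easydict, and the exact get_dataloader keyword arguments are assumptions based on the surrounding snippets, not a verbatim copy of the project's entry point.

# Usage sketch (assumed entry point; the config path and fields are illustrative).
import yaml
from easydict import EasyDict as edict

with open('configs/train/indoor.yaml', 'r') as f:   # hypothetical config file
    config = edict(yaml.safe_load(f))

train_set, val_set, benchmark_set = get_datasets(config)

# The loaders are then built the same way as in Examples #2 and #3.
train_loader, neighborhood_limits = get_dataloader(dataset=train_set,
                                                   batch_size=config.batch_size,
                                                   shuffle=True,
                                                   num_workers=config.num_workers)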
Example #2
    elif config.optimizer == 'ADAM':
        config.optimizer = optim.Adam(
            config.model.parameters(), 
            lr=config.lr,
            betas=(0.9, 0.999),
            weight_decay=config.weight_decay,
        )
    
    # create learning rate scheduler
    config.scheduler = optim.lr_scheduler.ExponentialLR(
        config.optimizer,
        gamma=config.scheduler_gamma,
    )
    
    # create dataset and dataloader
    info_train = load_obj(config.train_info)
    info_val = load_obj(config.val_info)
    info_benchmark = load_obj(f'configs/{config.test_info}.pkl')

    train_set = ThreeDMatchDownsampled(info_train, config, data_augmentation=True)
    val_set = ThreeDMatchDownsampled(info_val, config, data_augmentation=False)
    benchmark_set = ThreeDMatchDownsampled(info_benchmark, config, data_augmentation=False)

    config.train_loader, neighborhood_limits = get_dataloader(dataset=train_set,
                                        batch_size=config.batch_size,
                                        shuffle=True,
                                        num_workers=config.num_workers,
                                        )
    config.val_loader, _ = get_dataloader(dataset=val_set,
                                        batch_size=config.batch_size,
                                        shuffle=False,
                                        num_workers=1,
                                        neighborhood_limits=neighborhood_limits)
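
Once the optimizer, scheduler, and loaders are attached to config, a typical epoch loop would look roughly like the sketch below. The loss computation (compute_loss) and config.max_epoch are assumptions for illustration, since the project's actual trainer is not shown in these snippets.

# Rough training-loop sketch (assumed; the real trainer and loss function are not shown here).
for epoch in range(config.max_epoch):            # config.max_epoch is an assumed field
    config.model.train()
    for inputs in config.train_loader:
        config.optimizer.zero_grad()
        outputs = config.model(inputs)           # batch format assumed to match the model
        loss = compute_loss(outputs, inputs)     # hypothetical loss helper
        loss.backward()
        config.optimizer.step()
    config.scheduler.step()                      # exponential LR decay once per epoch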
Example #3
        'simple',
        'resnetb',
    ]
    # Encoder: one strided (downsampling) block plus two residual blocks per layer.
    for _ in range(config.num_layers - 1):
        config.architecture.append('resnetb_strided')
        config.architecture.append('resnetb')
        config.architecture.append('resnetb')
    # Decoder: upsample back towards the input resolution.
    for _ in range(config.num_layers - 2):
        config.architecture.append('nearest_upsample')
        config.architecture.append('unary')
    config.architecture.append('nearest_upsample')
    config.architecture.append('last_unary')
    config.model = KPFCNN(config).to(config.device)

    # create dataset and dataloader
    info_train = load_obj(config.train_info)
    train_set = IndoorDataset(info_train, config, data_augmentation=True)
    demo_set = ThreeDMatchDemo(config, config.src_pcd, config.tgt_pcd)

    _, neighborhood_limits = get_dataloader(
        dataset=train_set,
        batch_size=config.batch_size,
        shuffle=True,
        num_workers=config.num_workers,
    )
    demo_loader, _ = get_dataloader(
        dataset=demo_set,
        batch_size=config.batch_size,
        shuffle=False,
        num_workers=1,
        neighborhood_limits=neighborhood_limits,
    )
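
With neighborhood_limits calibrated on the training set, the demo loader can then be iterated for inference. The sketch below assumes the collated batch is a dict of tensors that the model accepts directly, which may differ from the project's actual collate format.

# Inference sketch (assumed batch handling; adjust to the project's actual collate format).
import torch

config.model.eval()
with torch.no_grad():
    for inputs in demo_loader:
        inputs = {k: v.to(config.device) if isinstance(v, torch.Tensor) else v
                  for k, v in inputs.items()}    # assumes a dict-style batch
        feats = config.model(inputs)             # output format depends on KPFCNN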