# Beispiel (Example) #1 — scraped snippet, score 0; the fragment below is truncated mid-call.
                             batch_size=1,
                             sampler=test_sampler,
                             collate_fn=collate_fn,
                             num_workers=config.input_threads,
                             pin_memory=True)

    # Calibrate samplers
    test_sampler.calibration(test_loader, verbose=True)

    print('\nModel Preparation')
    print('*****************')

    # Define network model
    t1 = time.time()
    if config.dataset_task == 'classification':
        net = KPCNN(config)
    # elif config.dataset_task in ['cloud_segmentation', 'slam_segmentation']:
    #     net = KPFCNN(config, test_dataset.label_values, test_dataset.ignored_labels)
    elif config.dataset_task in ['cloud_segmentation', 'slam_segmentation']:
        net = KPFCNN_featureAggre(config, test_dataset.label_values, test_dataset.ignored_labels)
    else:
        raise ValueError('Unsupported dataset_task for testing: ' + config.dataset_task)

    # Define a visualizer class
    tester = ModelTester(net, chkp_path=chosen_chkp)
    print('Done in {:.1f}s\n'.format(time.time() - t1))

    print('\nStart test')
    print('**********\n')

    # Training
def main():
    """Entry point: build train/test datasets and loaders, a KPCNN model,
    and an SGD optimizer with a separate learning rate for deformable-offset
    parameters, then run the epoch train/valid loop.

    NOTE(review): this snippet appears truncated — `best` is initialised but
    the epoch loop never uses `acc` to update or save a best checkpoint.
    """
    # Pin all CUDA work to GPU index 1.
    os.environ["CUDA_VISIBLE_DEVICES"] = "1"

    print("Load dataset...")
    # Train and test datasets share the same subsampling cell size
    # (presumably a grid-subsampling length, per the parameter name)
    # read from the project-level config.SETTING dict.
    training_dataset = CustomDataset(
        train=True,
        subsampling=True,
        first_subsampling_dl=config.SETTING['first_subsampling_dl'])
    test_dataset = CustomDataset(
        train=False,
        subsampling=True,
        first_subsampling_dl=config.SETTING['first_subsampling_dl'])

    # Project-defined samplers; balance_labels=True suggests class-balanced
    # sampling — confirm against CustomDataSampler's implementation.
    training_sampler = CustomDataSampler(training_dataset, balance_labels=True)
    test_sampler = CustomDataSampler(test_dataset, balance_labels=True)

    # batch_size=1 with a custom collate_fn: presumably the collate function
    # (ModelNet40Collate) assembles variable-size point clouds into batches
    # itself, rather than the DataLoader — TODO confirm.
    training_loader = DataLoader(training_dataset,
                                 batch_size=1,
                                 sampler=training_sampler,
                                 collate_fn=ModelNet40Collate,
                                 num_workers=config.SETTING['input_threads'],
                                 pin_memory=True)

    test_loader = DataLoader(test_dataset,
                             batch_size=1,
                             sampler=test_sampler,
                             collate_fn=ModelNet40Collate,
                             num_workers=config.SETTING['input_threads'],
                             pin_memory=True)

    print("ok!")

    print("Check device...")
    # Prefer GPU when available; falls back to CPU.
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    print(device)
    print("ok!")

    print("Load model...")
    # NOTE(review): elsewhere in this file KPCNN is constructed as
    # KPCNN(config); here it takes no argument — verify this KPCNN
    # variant really has a zero-argument constructor.
    model = KPCNN()
    model = model.to(device)
    print("ok!")

    print("Load optimizer...")
    # Split parameters: any parameter whose name contains 'offset'
    # (deformable-convolution offsets) gets its own, scaled learning rate;
    # everything else uses the base learning rate.
    deform_params = [v for k, v in model.named_parameters() if 'offset' in k]
    other_params = [
        v for k, v in model.named_parameters() if 'offset' not in k
    ]
    deform_lr = config.TRAIN['learning_rate'] * config.TRAIN['deform_lr_factor']
    # Two parameter groups: the second overrides 'lr' for the offset params.
    optimizer = torch.optim.SGD([{
        'params': other_params
    }, {
        'params': deform_params,
        'lr': deform_lr
    }],
                                lr=config.TRAIN['learning_rate'],
                                momentum=config.TRAIN['momentum'],
                                weight_decay=config.TRAIN['weight_decay'])
    print("ok!")

    # Starting training
    print("Starting training... ")

    # Presumably meant to track the best validation metric; never read in
    # the visible code (snippet truncated).
    best = 1

    for epoch in range(config.TRAIN['epoch']):
        # One training pass then one validation pass per epoch
        # (train/valid are project-defined helpers).
        train(epoch, training_loader, optimizer, model, device)
        acc = valid(test_loader, model, device)
# Beispiel (Example) #3 — scraped snippet, score 0; the fragment below is truncated mid-call.
                             batch_size=1,
                             sampler=test_sampler,
                             collate_fn=collate_fn,
                             num_workers=config.input_threads,
                             pin_memory=True)

    # Calibrate samplers
    test_sampler.calibration(test_loader, verbose=True)

    print('\nModel Preparation')
    print('*****************')

    # Define network model
    t1 = time.time()
    if config.dataset_task == 'classification':
        net = KPCNN(config)
    elif config.dataset_task in ['cloud_segmentation', 'slam_segmentation']:
        net = KPFCNN(config, test_dataset.label_values,
                     test_dataset.ignored_labels)
    else:
        raise ValueError('Unsupported dataset_task for testing: ' +
                         config.dataset_task)
    net = torch.nn.DataParallel(net.cuda())
    # Define a visualizer class
    tester = ModelTester(net, chkp_path=chosen_chkp)
    print('Done in {:.1f}s\n'.format(time.time() - t1))

    print('\nStart test')
    print('**********\n')

    # Training