Ejemplo n.º 1
0
        # Collect each successfully built dataset; the list is concatenated
        # into a single test set below (enclosing loop not visible here).
        db_test_list.append(db)

# Fail fast when the config yielded no usable test datasets.
if not db_test_list:
    raise ValueError('No valid datasets were passed in config file')

# Fuse all per-source test datasets into one evaluation dataset and wrap it
# in a deterministic loader: no shuffling, and the final partial batch kept.
db_test = torch.utils.data.ConcatDataset(db_test_list)
testLoader = DataLoader(
    db_test,
    batch_size=config.eval.batchSize,
    shuffle=False,
    num_workers=config.eval.numWorkers,
    drop_last=False,
)

###################### ModelBuilder #############################
# The checkpoint's model identifier selects only the DeepLab backbone; every
# other constructor argument is identical across the supported variants.
_MODEL_TO_BACKBONE = {
    'deeplab_xception': 'xception',
    'deeplab_resnet': 'resnet',
    'drn': 'drn',
}
_model_name = config_checkpoint.train.model
if _model_name not in _MODEL_TO_BACKBONE:
    raise ValueError('Invalid model passed')
model = deeplab.DeepLab(num_classes=config.train.numClasses,
                        backbone=_MODEL_TO_BACKBONE[_model_name],
                        sync_bn=True,
                        freeze_bn=False)
# Restore the trained weights saved alongside the checkpoint config.
model.load_state_dict(CHECKPOINT['model_state_dict'])
Ejemplo n.º 2
0
        augmentations.GenerateContour(),
        augmentations.GenerateInstanceMap(output_size),
        augmentations.ToTensor(),
    ])
    # Validation split: images and annotation files are read from
    # subdirectories of vali_dataset_dir using the shared class_map.
    vali_set = detection.DetectionDataset(
        os.path.join(vali_dataset_dir, image_dir),
        os.path.join(vali_dataset_dir, anno_dir), class_map, vali_transforms)
    # Positional args are (replacement=True, num_samples=epoch_size): each
    # validation pass samples epoch_size examples with replacement.
    vali_sampler = torch.utils.data.sampler.RandomSampler(
        vali_set, True, epoch_size)
    vali_loader = torch.utils.data.DataLoader(vali_set,
                                              batch_size=batch_size,
                                              num_workers=num_workers,
                                              sampler=vali_sampler,
                                              pin_memory=True)

    # Panoptic variant: wrap the DeepLab backbone in a PanopticSegment head
    # before handing it to the Detector training driver.
    backbone = deeplab.DeepLab(num_classes)
    model = pano_seg.PanopticSegment(backbone)
    solver = Detector(model,
                      class_map,
                      train_loader,
                      vali_loader,
                      batch_size,
                      optimizer=optimizer,
                      lr=lr,
                      checkpoint_name=checkpoint_name,
                      devices=devices,
                      num_classes=num_classes,
                      log_size=log_size,
                      prob_threshold=prob_threshold)

    # Resume training from an existing checkpoint when one was supplied.
    if checkpoint_path:
Ejemplo n.º 3
0
        augmentations.GenerateHeatmap(num_classes, output_size, cov),
        augmentations.GenerateMask(num_classes, output_size),
        augmentations.ToTensor(),
    ])
    # Validation split: images and annotation files are read from
    # subdirectories of vali_dataset_dir using the shared class_map.
    vali_set = detection.DetectionDataset(
        os.path.join(vali_dataset_dir, image_dir),
        os.path.join(vali_dataset_dir, anno_dir), class_map, vali_transforms)
    # Positional args are (replacement=True, num_samples=epoch_size): each
    # validation pass samples epoch_size examples with replacement.
    vali_sampler = torch.utils.data.sampler.RandomSampler(
        vali_set, True, epoch_size)
    vali_loader = torch.utils.data.DataLoader(vali_set,
                                              batch_size=batch_size,
                                              num_workers=num_workers,
                                              sampler=vali_sampler,
                                              pin_memory=True)

    # Detection variant: the bare DeepLab model is trained directly
    # (no panoptic wrapper); `cov` also parameterizes the heatmap targets.
    model = deeplab.DeepLab(num_classes)
    solver = Detector(model,
                      train_loader,
                      vali_loader,
                      batch_size,
                      optimizer=optimizer,
                      lr=lr,
                      checkpoint_name=checkpoint_name,
                      devices=devices,
                      cov=cov,
                      num_classes=num_classes,
                      log_size=log_size)

    # Resume training from an existing checkpoint when one was supplied.
    if checkpoint_path:
        solver.load_model(checkpoint_path)
    with SummaryWriter(comment=comment) as writer: