Example 1
from torch import multiprocessing

import dataloader   # project-local VOC12 dataset definitions
import torchutils   # project-local helpers (e.g. split_dataset)

def run(args):
    # Raw (un-normalized, non-tensor) VOC12 images, split into one shard per worker process.
    dataset = dataloader.VOC12ImageDataset(args.train_list, voc12_root=args.voc12_root, img_normal=None, to_torch=False)
    dataset = torchutils.split_dataset(dataset, args.num_workers)

    print('[ ', end='')
    multiprocessing.spawn(_work, nprocs=args.num_workers, args=(dataset, args), join=True)   # _work is the per-process worker defined elsewhere in this script
    print(']')
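
torch.multiprocessing.spawn passes each worker its process index first, followed by the contents of the args tuple given to spawn(). Below is a minimal sketch of a worker with a compatible signature; the DataLoader usage and the dataset[process_id] indexing are assumptions for illustration, not the repository's actual _work implementation.

from torch.utils.data import DataLoader

def _work(process_id, dataset, args):
    # spawn() supplies process_id automatically; dataset and args come from the args tuple.
    databin = dataset[process_id]   # the shard this process is responsible for
    data_loader = DataLoader(databin, shuffle=False, num_workers=0, pin_memory=False)
    for pack in data_loader:
        pass  # per-image work (e.g. writing labels to disk) would go here
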
Example 2
import importlib

import torch
from torch import multiprocessing

import dataloader   # project-local VOC12 dataset definitions
import torchutils   # project-local helpers (e.g. split_dataset)

def run(args):
    # Build the CAM network named by args.cam_network and load its trained weights.
    model = getattr(importlib.import_module(args.cam_network), 'CAM')()
    model.load_state_dict(torch.load(args.cam_weights_name + '.pth'), strict=True)
    model.eval()

    n_gpus = torch.cuda.device_count()

    # Multi-scale (MSF) classification dataset, split into one shard per GPU.
    dataset = dataloader.VOC12ClassificationDatasetMSF(args.train_list,
                                                       voc12_root=args.voc12_root, scales=args.cam_scales)
    dataset = torchutils.split_dataset(dataset, n_gpus)

    print('[ ', end='')
    multiprocessing.spawn(_work, nprocs=n_gpus, args=(model, dataset, args), join=True)   # one worker process per GPU; _work is defined elsewhere in this script
    print(']')

    torch.cuda.empty_cache()
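
Here one process is spawned per visible GPU, and each receives the shared model, its dataset shard, and the args namespace. A hedged sketch of how such a per-GPU worker typically binds itself to its device; the body is illustrative only, not the project's implementation.

import torch
from torch.utils.data import DataLoader

def _work(process_id, model, dataset, args):
    databin = dataset[process_id]
    data_loader = DataLoader(databin, shuffle=False, num_workers=0, pin_memory=False)

    # Pin this worker to GPU `process_id` and run inference without gradients.
    with torch.no_grad(), torch.cuda.device(process_id):
        model.cuda()
        for pack in data_loader:
            pass  # multi-scale CAM inference and saving would go here
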
Example 3
import importlib

import torch
from torch import multiprocessing

import dataloader   # project-local VOC12 dataset definitions
import torchutils   # project-local helpers (e.g. split_dataset)

def run(args):
    # Build the IRN EdgeDisplacement head and load its weights.
    model = getattr(importlib.import_module(args.irn_network),
                    'EdgeDisplacement')()
    model.load_state_dict(torch.load(args.irn_weights_name), strict=False)
    model.eval()

    n_gpus = torch.cuda.device_count()

    # Single-scale inference over the inference list, one shard per GPU.
    dataset = dataloader.VOC12ClassificationDatasetMSF(
        args.infer_list, voc12_root=args.voc12_root, scales=(1.0,))
    dataset = torchutils.split_dataset(dataset, n_gpus)

    print("[ ", end='')
    multiprocessing.spawn(_work,
                          nprocs=n_gpus,
                          args=(model, dataset, args),
                          join=True)
    print("]")