def prepare_dataloader(data_directory, mode, augment_parameters, do_augmentation, batch_size, size, num_workers, filenames = None, load_gt = False, load_predition = False, args = None):
    """Build a KITTI DataLoader.

    When ``filenames`` is None, every subdirectory of ``data_directory`` is
    loaded as its own KittiLoader and the results are concatenated; otherwise
    a single KittiLoader is built directly from ``data_directory`` using the
    given file list (optionally with ground truth / predictions).

    Returns:
        (n_img, loader): dataset size and the configured DataLoader.
    """
    transform = image_transforms(
        mode=mode,
        augment_parameters=augment_parameters,
        do_augmentation=do_augmentation,
        size=size,
    )

    if filenames is None:
        # One loader per subdirectory, merged into a single dataset.
        parts = []
        for sub_dir in os.listdir(data_directory):
            parts.append(KittiLoader(os.path.join(data_directory, sub_dir), mode, transform=transform))
        dataset = ConcatDataset(parts)
    else:
        dataset = KittiLoader(
            data_directory,
            mode,
            transform=transform,
            filenames=filenames,
            load_gt=load_gt,
            load_predition=load_predition,  # NOTE: parameter name kept as-is ("predition") for caller compatibility
            args=args,
        )

    n_img = len(dataset)
    print('Use a dataset with', n_img, 'images')

    # Shuffle only while training; evaluation keeps the original order.
    want_shuffle = (mode == 'train')
    loader = DataLoader(dataset,
                        batch_size=batch_size,
                        shuffle=want_shuffle,
                        num_workers=num_workers,
                        pin_memory=True)
    return n_img, loader
def prepare_dataloader(data_directory, mode, augment_parameters, do_augmentation, batch_size, size, num_workers):
    """Concatenate one KittiLoader per subdirectory of ``data_directory``
    and wrap the result in a shuffling DataLoader.

    Returns:
        (n_img, loader): dataset size and the configured DataLoader.
    """
    transform = image_transforms(
        mode=mode,
        augment_parameters=augment_parameters,
        do_augmentation=do_augmentation,
        size=size,
    )

    # Every entry under data_directory is treated as a separate drive/sequence.
    parts = [
        KittiLoader(os.path.join(data_directory, entry), mode, transform=transform)
        for entry in os.listdir(data_directory)
    ]
    dataset = ConcatDataset(parts)

    n_img = len(dataset)
    print('Use a dataset with', n_img, 'images')

    # Always shuffled; no pin_memory in this variant (matches original behavior).
    loader = DataLoader(dataset,
                        batch_size=batch_size,
                        shuffle=True,
                        num_workers=num_workers)
    return n_img, loader
def prepare_dataloader(data_directory, augment_parameters, do_augmentation, batch_size, size, num_workers, shuffle=True, drop_last=True):
    """Build a DataLoader over the concatenation of one KITTI dataset per
    subdirectory of ``data_directory``.

    Returns:
        (n_img, loader): dataset size and the configured DataLoader.
    """
    transform = image_transforms(
        augment_parameters=augment_parameters,
        do_augmentation=do_augmentation,
        size=size,
    )

    subsets = []
    for entry in os.listdir(data_directory):
        subsets.append(KITTI(os.path.join(data_directory, entry), transform=transform))
    dataset = ConcatDataset(subsets)

    n_img = len(dataset)
    print('Use a dataset with', n_img, 'instances')
    print('Batch size', batch_size)
    print('Drop last', drop_last)

    loader = DataLoader(dataset,
                        batch_size=batch_size,
                        drop_last=drop_last,
                        shuffle=shuffle,
                        num_workers=num_workers,
                        pin_memory=True)
    return n_img, loader
def prepare_dataloader(data_directory, mode, augment_parameters, do_augmentation, batch_size, size, num_workers, shuffle=True):
    """Build a single-directory KITTI DataLoader for the given mode.

    Shuffling honors the ``shuffle`` argument only in train mode; any other
    mode always iterates in order.

    Returns:
        (n_img, loader): dataset size and the configured DataLoader.
    """
    transform = image_transforms(
        mode=mode,
        augment_parameters=augment_parameters,
        do_augmentation=do_augmentation,
        size=size,
    )
    dataset = KITTI(data_directory, mode, transform=transform)

    n_img = len(dataset)
    print('Use a dataset with', n_img, 'images (mode={})'.format(mode))

    # Both original branches were identical except for the shuffle flag.
    effective_shuffle = shuffle if mode == 'train' else False
    loader = DataLoader(dataset,
                        batch_size=batch_size,
                        shuffle=effective_shuffle,
                        num_workers=num_workers,
                        pin_memory=True)
    return n_img, loader
def prepare_dataloader(data_directory, mode, augment_parameters, do_augmentation, batch_size, size, num_workers, split, filename):
    """Build a DataLoader for either the 'kitti' or 'eigen' split.

    Args:
        data_directory: Root data directory.
        mode: 'train' enables shuffling; anything else keeps order.
        augment_parameters, do_augmentation, size: forwarded to image_transforms.
        batch_size, num_workers: DataLoader settings.
        split: 'kitti' (one KittiLoader per subdirectory, concatenated) or
            'eigen' (single KittiLoader_Eigen driven by ``filename``).
        filename: File-list path used only by the 'eigen' split.

    Returns:
        (n_img, loader): dataset size and the configured DataLoader.

    Raises:
        ValueError: if ``split`` is neither 'kitti' nor 'eigen'.
    """
    data_dirs = os.listdir(data_directory)
    data_transform = image_transforms(
        mode=mode,
        augment_parameters=augment_parameters,
        do_augmentation=do_augmentation,
        size=size)
    if split == 'kitti':
        datasets = [KittiLoader(os.path.join(data_directory, data_dir),
                                mode, transform=data_transform)
                    for data_dir in data_dirs]
        # `datasets` is a list of per-directory loaders; ConcatDataset merges
        # them into one dataset.
        dataset = ConcatDataset(datasets)
        n_img = len(dataset)
        print('KITTI: Use a dataset with', n_img, 'images')
    elif split == 'eigen':
        dataset = KittiLoader_Eigen(root_dir=data_directory, root_filename=filename,
                                    mode=mode, transform=data_transform)
        n_img = len(dataset)
        print('EIGEN: Use a dataset with', n_img, 'images')
    else:
        # BUG FIX: the original printed 'Wrong split' and fell through,
        # crashing later with an UnboundLocalError on `dataset`. Fail fast
        # with an explicit error instead.
        raise ValueError('Wrong split')
    if mode == 'train':
        loader = DataLoader(dataset, batch_size=batch_size,
                            shuffle=True, num_workers=num_workers,
                            pin_memory=True)
    else:
        loader = DataLoader(dataset, batch_size=batch_size,
                            shuffle=False, num_workers=num_workers,
                            pin_memory=True)
    return n_img, loader
def prepare_dataloader(data_directory, mode, augment_parameters, do_augmentation, batch_size, size, num_workers):
    """Build a DataLoader over a single KaistLoader dataset.

    Returns:
        (n_img, loader): dataset size and the configured DataLoader.
    """
    transform = image_transforms(
        mode=mode,
        augment_parameters=augment_parameters,
        do_augmentation=do_augmentation,
        size=size,
    )

    # A single KAIST dataset; ConcatDataset is kept for interface parity
    # with the per-directory KITTI variants elsewhere in this file.
    dataset = ConcatDataset([KaistLoader(data_directory, mode, transform=transform)])

    n_img = len(dataset)
    print('Use a dataset with', n_img, 'images')

    loader = DataLoader(dataset,
                        batch_size=batch_size,
                        shuffle=(mode == 'train'),
                        num_workers=num_workers,
                        pin_memory=True)
    return n_img, loader
def prepare_dataloader(data_directory, mode, augment_parameters, do_augmentation, batch_size, size, num_workers, train):
    """Build a segmentation/simulation DataLoader for the given mode.

    Args:
        data_directory: Root directory whose subdirectories feed SegLoader
            (train/val modes only).
        mode: 'train' or 'val' (SegLoader over subdirectories, shuffled,
            drop_last) or 'test' (SimulationLoader over 'data/test/').
        augment_parameters, do_augmentation, size: forwarded to image_transforms.
        batch_size, num_workers: DataLoader settings.
        train: unused; kept for caller compatibility.

    Returns:
        (n_img, loader): dataset size and the configured DataLoader.

    Raises:
        ValueError: if ``mode`` is not 'train', 'val', or 'test'.
    """
    data_transform = image_transforms(
        mode=mode,
        augment_parameters=augment_parameters,
        do_augmentation=do_augmentation,
        size=size,
    )
    if mode == 'val' or mode == 'train':
        datasets = [
            SegLoader(os.path.join(data_directory, i), mode, transform=data_transform)
            for i in os.listdir(data_directory)
        ]
        dataset = ConcatDataset(datasets)
    elif mode == 'test':
        # NOTE: test data path is hard-coded, not taken from data_directory.
        dataset = SimulationLoader('data/test/', mode, transform=data_transform)
    else:
        # BUG FIX: the original silently fell through on an unknown mode and
        # crashed later with an UnboundLocalError on `dataset`.
        raise ValueError('Unknown mode: {}'.format(mode))
    n_img = len(dataset)
    print('Use a dataset with', n_img, 'images')
    # The original 'val' and 'train' branches were byte-identical; merged.
    if mode == 'val' or mode == 'train':
        loader = DataLoader(dataset, batch_size=batch_size,
                            shuffle=True, num_workers=num_workers,
                            pin_memory=True, drop_last=True)
    else:
        loader = DataLoader(dataset, batch_size=batch_size,
                            shuffle=False, num_workers=num_workers,
                            pin_memory=True)
    return n_img, loader
def prepare_dataloader_orthanchdf5(
    data_directory,
    mode,
    augment_parameters,
    do_augmentation,
    batch_size,
    size,
    num_workers,
    labels=(True, False),
):
    """Build a DataLoader over an OrthancData HDF5 dataset.

    Shuffles only in "train" mode; both loaders pin memory.

    Returns:
        (n_img, loader): dataset size and the configured DataLoader.
    """
    transform = image_transforms(
        mode=mode,
        augment_parameters=augment_parameters,
        do_augmentation=do_augmentation,
        size=size,
    )
    dataset = OrthancData(data_directory, mode=mode, transform=transform, labels=labels)

    loader = DataLoader(
        dataset,
        batch_size=batch_size,
        shuffle=(mode == "train"),
        num_workers=num_workers,
        pin_memory=True,
    )
    return len(dataset), loader
def prepare_multi_dataloader(data_directory, mode, augment_parameters, do_augmentation, batch_size, size, num_workers, train):
    """Build paired DataLoaders: one over segmentation subdirectories and
    one over the endoscopy dataset.

    Args:
        train: unused; kept for caller compatibility.

    Returns:
        ([arth_n_img, seg_n_img], [arth_loader, seg_loader]): sizes and
        loaders, endoscopy first.
    """
    seg_dir = data_directory + 'segmentation/'
    simu_dir = data_directory + 'simulation'  # computed but unused (kept as-is)
    arth_dir = data_directory + 'endoscopy/'

    data_transform = image_transforms(
        mode=mode,
        augment_parameters=augment_parameters,
        do_augmentation=do_augmentation,
        size=size,
    )

    # One SegLoader per subdirectory, merged into a single dataset.
    seg_dataset = ConcatDataset([
        SegLoader(os.path.join(seg_dir, entry), mode, transform=data_transform)
        for entry in os.listdir(seg_dir)
    ])
    # Despite the name, this loads the endoscopy directory.
    simulation_dataset = EndoscopyLoader(arth_dir, mode, transform=data_transform)

    seg_n_img = len(seg_dataset)
    arth_n_img = len(simulation_dataset)
    print('{} mode, {} images'.format(mode, seg_n_img + arth_n_img))

    if mode == 'val' or mode == 'train':
        seg_loader = DataLoader(seg_dataset, batch_size=batch_size,
                                shuffle=True, num_workers=num_workers,
                                pin_memory=True, drop_last=True)
        arth_loader = DataLoader(simulation_dataset, batch_size=batch_size,
                                 shuffle=True, num_workers=num_workers,
                                 pin_memory=True, drop_last=True)
    else:
        seg_loader = DataLoader(seg_dataset, batch_size=batch_size,
                                shuffle=False, num_workers=num_workers,
                                pin_memory=True)
        arth_loader = DataLoader(simulation_dataset, batch_size=batch_size,
                                 shuffle=False, num_workers=num_workers,
                                 pin_memory=True)

    return [arth_n_img, seg_n_img], [arth_loader, seg_loader]