def get_loader(config):
    """Build distributed train/val DataLoaders for S3DIS segmentation.

    Args:
        config: experiment configuration namespace; fields read include the
            augmentation ranges, dataset parameters, ``batch_size``,
            ``num_workers`` and ``epochs``.

    Returns:
        Tuple ``(train_loader, val_loader)`` of ``torch.utils.data.DataLoader``.
    """
    # Training augmentation: to-tensor, random rotation, then scale + jitter.
    # NOTE(review): `transforms`, `d_utils` and `S3DISSeg` are presumably
    # module-level imports — not visible in this chunk.
    aug_train = transforms.Compose([
        d_utils.PointcloudToTensor(),
        d_utils.PointcloudRandomRotate(x_range=config.x_angle_range,
                                       y_range=config.y_angle_range,
                                       z_range=config.z_angle_range),
        d_utils.PointcloudScaleAndJitter(scale_low=config.scale_low,
                                         scale_high=config.scale_high,
                                         std=config.noise_std,
                                         clip=config.noise_clip,
                                         augment_symmetries=config.augment_symmetries),
    ])
    # Evaluation path applies tensor conversion only (no augmentation).
    aug_eval = transforms.Compose([d_utils.PointcloudToTensor()])

    ds_train = S3DISSeg(input_features_dim=config.input_features_dim,
                        subsampling_parameter=config.sampleDl,
                        color_drop=config.color_drop,
                        in_radius=config.in_radius,
                        num_points=config.num_points,
                        num_steps=config.num_steps,
                        num_epochs=config.epochs,
                        data_root=config.data_root,
                        transforms=aug_train,
                        split='train')
    # Validation uses a fixed num_epochs=20 (original hard-coded value).
    ds_val = S3DISSeg(input_features_dim=config.input_features_dim,
                      subsampling_parameter=config.sampleDl,
                      color_drop=config.color_drop,
                      in_radius=config.in_radius,
                      num_points=config.num_points,
                      num_steps=config.num_steps,
                      num_epochs=20,
                      data_root=config.data_root,
                      transforms=aug_eval,
                      split='val')

    # shuffle=False on the train sampler: the dataset presumably performs its
    # own per-epoch sampling — TODO confirm against S3DISSeg.
    sampler_train = torch.utils.data.distributed.DistributedSampler(
        ds_train, shuffle=False)
    loader_train = torch.utils.data.DataLoader(ds_train,
                                               batch_size=config.batch_size,
                                               shuffle=False,
                                               num_workers=config.num_workers,
                                               pin_memory=True,
                                               sampler=sampler_train,
                                               drop_last=True)

    sampler_val = torch.utils.data.distributed.DistributedSampler(
        ds_val, shuffle=False)
    loader_val = torch.utils.data.DataLoader(ds_val,
                                             batch_size=config.batch_size,
                                             shuffle=False,
                                             num_workers=config.num_workers,
                                             pin_memory=True,
                                             sampler=sampler_val,
                                             drop_last=False)

    return loader_train, loader_val
def get_loader(args):
    """Build distributed train/val/test DataLoaders for PartNet segmentation.

    Args:
        args: runtime arguments providing ``data_root``, ``batch_size`` and
            ``num_workers``.

    Returns:
        Tuple ``(train_loader, val_loader, test_loader)``.

    NOTE(review): augmentation and feature parameters are read from a
    ``config`` object that is not a parameter here — presumably a module-level
    global; verify against the surrounding module.
    """
    # Training augmentation: to-tensor followed by scale + jitter.
    aug_train = transforms.Compose([
        d_utils.PointcloudToTensor(),
        d_utils.PointcloudScaleAndJitter(scale_low=config.scale_low,
                                         scale_high=config.scale_high,
                                         std=config.noise_std,
                                         clip=config.noise_clip),
    ])
    # Evaluation path: tensor conversion only.
    aug_eval = transforms.Compose([d_utils.PointcloudToTensor()])

    ds_train = PartNetSeg(input_features_dim=config.input_features_dim,
                          data_root=args.data_root,
                          transforms=aug_train,
                          split='train')
    ds_val = PartNetSeg(input_features_dim=config.input_features_dim,
                        data_root=args.data_root,
                        transforms=aug_eval,
                        split='val')
    ds_test = PartNetSeg(input_features_dim=config.input_features_dim,
                         data_root=args.data_root,
                         transforms=aug_eval,
                         split='test')

    # Train sampler keeps the default shuffling; eval samplers are ordered.
    sampler_train = torch.utils.data.distributed.DistributedSampler(ds_train)
    sampler_val = torch.utils.data.distributed.DistributedSampler(
        ds_val, shuffle=False)
    sampler_test = torch.utils.data.distributed.DistributedSampler(
        ds_test, shuffle=False)

    loader_train = torch.utils.data.DataLoader(ds_train,
                                               batch_size=args.batch_size,
                                               shuffle=False,
                                               num_workers=args.num_workers,
                                               pin_memory=True,
                                               sampler=sampler_train,
                                               drop_last=True)
    loader_val = torch.utils.data.DataLoader(ds_val,
                                             batch_size=args.batch_size,
                                             shuffle=False,
                                             num_workers=args.num_workers,
                                             pin_memory=True,
                                             sampler=sampler_val,
                                             drop_last=False)
    loader_test = torch.utils.data.DataLoader(ds_test,
                                              batch_size=args.batch_size,
                                              shuffle=False,
                                              num_workers=args.num_workers,
                                              pin_memory=True,
                                              sampler=sampler_test,
                                              drop_last=False)

    return loader_train, loader_val, loader_test
def get_loader(args):
    """Build distributed val/test DataLoaders for PartNet segmentation.

    Evaluation-only variant: no training split, no augmentation.

    Args:
        args: runtime arguments providing ``data_root``, ``batch_size`` and
            ``num_workers``.

    Returns:
        Tuple ``(val_loader, test_loader)``.

    NOTE(review): ``config.input_features_dim`` is read from a ``config``
    object that is not a parameter — presumably a module-level global.
    """
    # Evaluation transform: tensor conversion only.
    aug_eval = transforms.Compose([d_utils.PointcloudToTensor()])

    ds_val = PartNetSeg(input_features_dim=config.input_features_dim,
                        data_root=args.data_root,
                        transforms=aug_eval,
                        split='val')
    ds_test = PartNetSeg(input_features_dim=config.input_features_dim,
                         data_root=args.data_root,
                         transforms=aug_eval,
                         split='test')

    # Ordered (non-shuffled) samplers for deterministic evaluation.
    sampler_val = torch.utils.data.distributed.DistributedSampler(
        ds_val, shuffle=False)
    sampler_test = torch.utils.data.distributed.DistributedSampler(
        ds_test, shuffle=False)

    loader_val = torch.utils.data.DataLoader(ds_val,
                                             batch_size=args.batch_size,
                                             shuffle=False,
                                             num_workers=args.num_workers,
                                             pin_memory=True,
                                             sampler=sampler_val,
                                             drop_last=False)
    loader_test = torch.utils.data.DataLoader(ds_test,
                                              batch_size=args.batch_size,
                                              shuffle=False,
                                              num_workers=args.num_workers,
                                              pin_memory=True,
                                              sampler=sampler_test,
                                              drop_last=False)

    return loader_val, loader_test
def get_loader(args):
    """Build a distributed test DataLoader for ModelNet40 classification.

    Args:
        args: runtime arguments providing ``num_points``, ``data_root``,
            ``batch_size`` and ``num_workers``.

    Returns:
        A single test ``torch.utils.data.DataLoader``.

    NOTE(review): ``config.input_features_dim`` and ``config.sampleDl`` come
    from a ``config`` object that is not a parameter — presumably a
    module-level global; verify against the surrounding module.
    """
    # Test-time transform: tensor conversion only (no augmentation).
    aug_eval = transforms.Compose([d_utils.PointcloudToTensor()])

    ds_test = ModelNet40Cls(input_features_dim=config.input_features_dim,
                            num_points=args.num_points,
                            data_root=args.data_root,
                            transforms=aug_eval,
                            subsampling_parameter=config.sampleDl,
                            split='test')

    # Ordered sampler for deterministic evaluation across ranks.
    sampler_test = torch.utils.data.distributed.DistributedSampler(
        ds_test, shuffle=False)
    loader_test = torch.utils.data.DataLoader(ds_test,
                                              batch_size=args.batch_size,
                                              shuffle=False,
                                              num_workers=args.num_workers,
                                              pin_memory=True,
                                              sampler=sampler_test,
                                              drop_last=False)

    return loader_test
def get_loader(config):
    """Build a distributed validation DataLoader for S3DIS semantic segmentation.

    Args:
        config: experiment configuration namespace; fields read include the
            dataset parameters, ``batch_size`` and ``num_workers``.

    Returns:
        A single validation ``torch.utils.data.DataLoader``.

    NOTE(review): unlike the sibling loaders, ``S3DISSemSeg`` is constructed
    without ``data_root`` — presumably that class has a default; confirm.
    """
    # Validation transform: tensor conversion only (no augmentation).
    aug_eval = transforms.Compose([d_utils.PointcloudToTensor()])

    # num_epochs=20 is the original hard-coded value for validation.
    ds_val = S3DISSemSeg(input_features_dim=config.input_features_dim,
                         subsampling_parameter=config.sampleDl,
                         color_drop=config.color_drop,
                         in_radius=config.in_radius,
                         num_points=config.num_points,
                         num_steps=config.num_steps,
                         num_epochs=20,
                         transforms=aug_eval,
                         split='val')

    # Ordered sampler for deterministic evaluation across ranks.
    sampler_val = torch.utils.data.distributed.DistributedSampler(
        ds_val, shuffle=False)
    loader_val = torch.utils.data.DataLoader(ds_val,
                                             batch_size=config.batch_size,
                                             shuffle=False,
                                             num_workers=config.num_workers,
                                             pin_memory=True,
                                             sampler=sampler_val,
                                             drop_last=False)

    return loader_val
def get_loader(args):
    """Build a distributed test DataLoader for ShapeNet part segmentation.

    Args:
        args: runtime arguments providing ``num_points``, ``data_root``,
            ``batch_size`` and ``num_workers``.

    Returns:
        A single test ``torch.utils.data.DataLoader``.
    """
    # Test-time transform: tensor conversion only (no augmentation).
    aug_eval = transforms.Compose([d_utils.PointcloudToTensor()])

    ds_test = ShapeNetPartSeg(num_points=args.num_points,
                              data_root=args.data_root,
                              transforms=aug_eval,
                              split='test')

    # Ordered sampler for deterministic evaluation across ranks.
    sampler_test = torch.utils.data.distributed.DistributedSampler(
        ds_test, shuffle=False)
    loader_test = torch.utils.data.DataLoader(ds_test,
                                              batch_size=args.batch_size,
                                              shuffle=False,
                                              num_workers=args.num_workers,
                                              pin_memory=True,
                                              sampler=sampler_test,
                                              drop_last=False)

    return loader_test