def __init__(self, dataset_opt, training_opt):
    super().__init__(dataset_opt, training_opt)

    pre_transform = self._pre_transform
    transform = T.Compose([
        T.FixedPoints(dataset_opt.num_points),
        T.RandomTranslate(0.01),
        T.RandomRotate(180, axis=2),
    ])

    train_dataset = S3DIS1x1(
        self._data_path,
        test_area=self.dataset_opt.fold,
        train=True,
        pre_transform=pre_transform,
        transform=transform,
    )
    test_dataset = S3DIS1x1(
        self._data_path,
        test_area=self.dataset_opt.fold,
        train=False,
        pre_transform=pre_transform,
        transform=T.FixedPoints(dataset_opt.num_points),
    )
    train_dataset = add_weights(train_dataset, True, dataset_opt.class_weight_method)

    self._create_dataloaders(train_dataset, test_dataset)

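# The add_weights helper used above is external and not shown here. As a
# hedged illustration only, one common scheme it could implement is inverse
# square-root frequency weighting over point labels (an assumption, not the
# actual helper):
import torch

def sqrt_inverse_frequency_weights(labels, num_classes):
    # Count points per class, then down-weight the frequent ones.
    counts = torch.bincount(labels, minlength=num_classes).float()
    weights = 1.0 / torch.sqrt(counts.clamp(min=1.0))
    return weights / weights.sum()
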
def __init__(self, dataset_opt, training_opt):
    super().__init__(dataset_opt, training_opt)
    self._data_path = os.path.join(dataset_opt.dataroot, "ShapeNet")

    # dataset_opt may be an attribute- or key-style config; cover both.
    try:
        self._category = dataset_opt.category
    except (AttributeError, KeyError):
        self._category = None

    pre_transform = self._pre_transform
    train_transform = T.Compose([T.FixedPoints(dataset_opt.num_points), RandomNoise()])
    test_transform = T.FixedPoints(dataset_opt.num_points)

    train_dataset = ShapeNet(
        self._data_path,
        self._category,
        include_normals=dataset_opt.normal,
        split="trainval",
        pre_transform=pre_transform,
        transform=train_transform,
    )
    test_dataset = ShapeNet(
        self._data_path,
        self._category,
        include_normals=dataset_opt.normal,
        split="test",
        pre_transform=pre_transform,
        transform=test_transform,
    )
    self._categories = train_dataset.categories
    self._create_dataloaders(train_dataset, test_dataset)

def __init__(self, dataset_opt, training_opt):
    super().__init__(dataset_opt, training_opt)
    self._data_path = os.path.join(dataset_opt.dataroot, "S3DIS")

    pre_transform = cT.GridSampling(dataset_opt.first_subsampling, 13)
    # Select only 2^15 points from the room
    # pre_transform = T.FixedPoints(dataset_opt.room_points)

    transform = T.Compose([
        T.FixedPoints(dataset_opt.num_points),
        T.RandomTranslate(0.01),
        T.RandomRotate(180, axis=2),
    ])

    train_dataset = S3DIS_With_Weights(
        self._data_path,
        test_area=self.dataset_opt.fold,
        train=True,
        pre_transform=pre_transform,
        transform=transform,
        class_weight_method=dataset_opt.class_weight_method,
    )
    test_dataset = S3DIS_With_Weights(
        self._data_path,
        test_area=self.dataset_opt.fold,
        train=False,
        pre_transform=pre_transform,
        transform=T.FixedPoints(dataset_opt.num_points),
    )
    self._create_dataloaders(train_dataset, test_dataset)

def augment_transforms(args):
    """Define the pre-transform (normalization) and the per-sample transform."""
    pre_transform = None
    if args.norm == 'scale':
        pre_transform = T.NormalizeScale()
    elif args.norm == 'bbox':
        pre_transform = NormalizeBox()
    elif args.norm == 'sphere':
        pre_transform = NormalizeSphere(center=True)
    elif args.norm == 'sphere_wo_center':
        pre_transform = NormalizeSphere(center=False)

    transform = []
    # ShapeNet (segmentation): data are point clouds, so subsample a fixed count.
    if args.task == 'segmentation':
        transform.append(T.FixedPoints(args.num_pts))
    # ModelNet (classification): data are meshes, so sample points from faces.
    if args.task == 'classification':
        transform.append(T.SamplePoints(args.num_pts))
    transform = T.Compose(transform)

    return pre_transform, transform

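# NormalizeBox and NormalizeSphere above are custom transforms whose code is
# not shown. A minimal sketch of what NormalizeSphere plausibly does (center
# the cloud, then scale it into the unit sphere), assuming `center` toggles
# mean subtraction:
class NormalizeSphere(object):
    def __init__(self, center=True):
        self.center = center

    def __call__(self, data):
        if self.center:
            data.pos = data.pos - data.pos.mean(dim=0, keepdim=True)
        # Scale so the farthest point lies on the unit sphere.
        radius = data.pos.norm(dim=1).max().clamp(min=1e-12)
        data.pos = data.pos / radius
        return data
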
def __init__(self, dataset_opt, training_opt):
    super().__init__(dataset_opt, training_opt)
    self._data_path = os.path.join(dataset_opt.dataroot, "S3DIS1x1")

    transform = T.Compose([
        T.FixedPoints(dataset_opt.num_points),
        T.RandomTranslate(0.01),
        T.RandomRotate(180, axis=2),
    ])

    train_dataset = S3DIS_With_Weights(
        self._data_path,
        test_area=self.dataset_opt.fold,
        train=True,
        transform=transform,
        class_weight_method=dataset_opt.class_weight_method,
    )
    test_dataset = S3DIS_With_Weights(
        self._data_path,
        test_area=self.dataset_opt.fold,
        train=False,
        transform=T.FixedPoints(dataset_opt.num_points),
    )
    self._create_dataloaders(train_dataset, test_dataset)

def getSampler(name, dataset_name):
    transform = None
    if dataset_name == 'ShapeNet':
        transform = T.FixedPoints(NUM_POINT)
    elif name == 'ImportanceSampling':
        transform = T.SamplePoints(NUM_POINT, remove_faces=True, include_normals=USE_NORMALS)
    elif name == 'PoissonDiskSampling':
        transform = PoissonDiskSampling(NUM_POINT, remove_faces=True)
    return transform

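# Example call, assuming the module-level NUM_POINT and USE_NORMALS globals
# that getSampler reads (values here are illustrative only):
NUM_POINT = 1024
USE_NORMALS = False
sampler = getSampler('ImportanceSampling', 'ModelNet40')
# -> T.SamplePoints(1024, remove_faces=True, include_normals=False)
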
def load_dataset(args):
    # load ShapeNet dataset
    if args.dataset == 'shapenet':
        pre_transform, transform = augment_transforms(args)
        categories = args.categories.split(',')
        train_dataset = ShapeNet('../data_root/ShapeNet_normal', categories, split='trainval',
                                 include_normals=False, pre_transform=pre_transform,
                                 transform=transform)
        test_dataset = ShapeNet('../data_root/ShapeNet_normal', categories, split='test',
                                include_normals=False, pre_transform=pre_transform,
                                transform=T.FixedPoints(args.num_pts))
        train_dataloader = DataLoader(train_dataset, batch_size=args.bsize, shuffle=True,
                                      num_workers=6, drop_last=True)
        test_dataloader = DataLoader(test_dataset, batch_size=args.bsize, shuffle=True,
                                     num_workers=6, drop_last=True)

    # load ModelNet dataset
    if args.dataset == 'modelnet':
        pre_transform, transform = augment_transforms(args)
        train_dataset = ModelNet('../data_root/ModelNet40', name='40', train=True,
                                 pre_transform=pre_transform, transform=transform)
        test_dataset = ModelNet('../data_root/ModelNet40', name='40', train=False,
                                pre_transform=pre_transform,
                                transform=T.SamplePoints(args.num_pts))
        train_dataloader = DataLoader(train_dataset, batch_size=args.bsize, shuffle=True,
                                      num_workers=6, drop_last=True)
        test_dataloader = DataLoader(test_dataset, batch_size=args.bsize, shuffle=True,
                                     num_workers=6, drop_last=True)

    # load completion3D dataset
    if args.dataset == 'completion3D':
        pre_transform, transform = augment_transforms(args)
        categories = args.categories.split(',')
        train_dataset = completion3D_class('../data_root/completion3D', categories, split='train',
                                           include_normals=False, pre_transform=pre_transform,
                                           transform=transform)
        test_dataset = completion3D_class('../data_root/completion3D', categories, split='val',
                                          include_normals=False, pre_transform=pre_transform,
                                          transform=transform)
        train_dataloader = DataLoader(train_dataset, batch_size=args.bsize, shuffle=True,
                                      num_workers=8, drop_last=True)
        test_dataloader = DataLoader(test_dataset, batch_size=args.bsize, shuffle=False,
                                     num_workers=8, drop_last=True)

    return train_dataloader, test_dataloader

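# Minimal usage sketch for load_dataset, assuming an argparse-style namespace
# carrying the fields read above (dataset, categories, num_pts, norm, bsize;
# `task` is included because one augment_transforms variant reads it). All
# field values are illustrative only:
from argparse import Namespace

args = Namespace(dataset='shapenet', categories='Airplane,Chair',
                 num_pts=2048, norm='scale', bsize=16, task='segmentation')
train_loader, test_loader = load_dataset(args)
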
def augment_transforms(args):
    """Define the pre-transform (normalization) and the per-sample transform."""
    pre_transform = None
    if args.norm == 'scale':
        pre_transform = T.NormalizeScale()
    elif args.norm == 'sphere':
        pre_transform = NormalizeSphere(center=True)
    elif args.norm == 'sphere_wo_center':
        pre_transform = NormalizeSphere(center=False)

    transform = []
    if args.dataset == 'shapenet':
        transform.append(T.FixedPoints(args.num_pts))
    if args.dataset == 'modelnet':
        transform.append(T.SamplePoints(args.num_pts))
    # if args.is_randRotY:
    #     transform.append(T.RandomRotate(180, axis=1))
    transform = T.Compose(transform)

    return pre_transform, transform

"--log-interval", type=int, default=50, help="number of minibatches between logging", ) return parser if __name__ == "__main__": args = default_argument_parser().parse_args() np.random.seed(args.seed) num_classes = 2 transforms = [] if args.max_points > 0: transforms.append(T.FixedPoints(args.max_points)) if args.augment: transforms.append(T.RandomRotate((-180, 180), axis=2)) # Rotate around z axis transforms.append(T.RandomFlip(0)) # Flp about x axis transforms.append(T.RandomFlip(1)) # Flip about y axis transforms.append(T.RandomTranslate(0.0001)) # Random jitter if args.norm: transforms.append(T.NormalizeScale()) transform = T.Compose(transforms=transforms) if transforms else None train_dataset = EventDataset(args.dataset, "trainval", include_proton=True, task="separation", cleanliness=args.clean, pre_transform=None,
path = '/home/cluster/users/erant_group/shapenet_segmentation'
savepath = '/home/cluster/users/erant_group/diffops/' + expname
batchSize = 20
npoints = 2048

if "slurm" in sys.argv:
    path = '/home/eliasof/meshfit/pytorch_geometric/data/shapenet'
    savepath = '/home/eliasof/meshfit/pytorch_geometric/checkpoints/' + expname

train_transform = T.Compose([
    T.RandomTranslate(0.01),
    T.RandomRotate(15, axis=0),
    T.RandomRotate(15, axis=1),
    T.RandomRotate(15, axis=2),
    T.FixedPoints(npoints, replace=False)
])
pre_transform, transform = T.NormalizeScale(), T.FixedPoints(npoints, replace=True)

train_dataset = ShapeNet2(path, categories=str(category), split='trainval',
                          transform=transform, pre_transform=pre_transform)
test_dataset = ShapeNet2(path, categories=str(category), split='test',
                         transform=transform, pre_transform=pre_transform)
train_loader = DataLoader(train_dataset, batch_size=batchSize,

import os.path as osp

import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.nn import Sequential as Seq, Dropout, Linear as Lin, ReLU, BatchNorm1d as BN
from torch_geometric.datasets import ShapeNet
import torch_geometric.transforms as T
from torch_geometric.data import DataListLoader
from torch_geometric.nn import DynamicEdgeConv, DataParallel, global_max_pool
from torch_geometric.utils import intersection_and_union as i_and_u

category = None  # e.g. 'Airplane'; None keeps all categories
path = 'ShapeNet'
transform = T.Compose([
    T.FixedPoints(2048),
])
pre_transform = T.NormalizeScale()
train_dataset = ShapeNet(path, category, train=True, transform=transform,
                         pre_transform=pre_transform)
test_dataset = ShapeNet(path, category, train=False, transform=transform,
                        pre_transform=pre_transform)
train_loader = DataListLoader(train_dataset, batch_size=32, shuffle=True, num_workers=32,

        z_sig = self.sig_conv(x, pos, edge_index)
        z_mu = global_max_pool(z_mu, batch)
        z_sig = global_max_pool(z_sig, batch)
        z_sig = z_sig.clamp(max=10)
        if self.training:
            # Reparameterization trick; z_sig is treated as log-sigma.
            z = z_mu + torch.randn_like(z_sig) * torch.exp(z_sig)
        else:
            z = z_mu
        out = F.relu(self.fc3(z))
        out = self.fc4(out)
        out = out.reshape((1024 * data.y.size(0), 3))
        return out, z_mu, z_sig, z


path = osp.join(osp.dirname(osp.realpath(__file__)), '..', 'data/ModelNet10')
pre_transform, transform = T.NormalizeScale(), T.FixedPoints(1024)
train_dataset = ModelNet(path, '10', True, transform, pre_transform)
test_dataset = ModelNet(path, '10', False, transform, pre_transform)
train_loader = DataLoader(train_dataset, batch_size=10, shuffle=True, num_workers=1)
test_loader = DataLoader(test_dataset, batch_size=10, shuffle=False, num_workers=1)

device = torch.device('cuda:3' if torch.cuda.is_available() else 'cpu')
model = Net(0.5, 0.2).to(device)
optimizer = torch.optim.Adam(model.parameters(), lr=0.001)

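# The forward pass above returns z_mu and z_sig for a VAE-style loss computed
# elsewhere. A hedged sketch of the usual KL term, under the assumption that
# z_sig holds log(sigma), matching the exp() in the reparameterization:
def kl_divergence(z_mu, z_sig):
    # KL(N(mu, sigma^2) || N(0, I)) with z_sig = log(sigma)
    return -0.5 * torch.sum(1 + 2 * z_sig - z_mu.pow(2) - (2 * z_sig).exp())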