Example #1
0
    def __init__(self, dataset_opt):
        """Build the S3DIS 1x1 train/test datasets for the given options.

        Both splits share the dataset's ``pre_transform``; the train split is
        additionally wrapped with per-class weights via ``add_weights``.

        Args:
            dataset_opt: options namespace providing ``fold`` (the held-out
                test area) and ``class_weight_method``.
        """
        super().__init__(dataset_opt)

        # One local so both splits are guaranteed the same pre_transform.
        pre_transform = self.pre_transform

        # NOTE(review): the original code also built an augmentation pipeline
        # (FixedPoints / RandomTranslate / RandomRotate) here but never used
        # it -- both datasets take self.train_transform / self.test_transform
        # instead. The dead code was removed; confirm the augmentations were
        # not meant to replace self.train_transform.

        train_dataset = S3DIS1x1(
            self._data_path,
            test_area=self.dataset_opt.fold,
            train=True,
            pre_transform=pre_transform,
            transform=self.train_transform,
        )
        self.test_dataset = S3DIS1x1(
            self._data_path,
            test_area=self.dataset_opt.fold,
            train=False,
            pre_transform=pre_transform,
            transform=self.test_transform,
        )

        # Attach class weights (method chosen by the options) to the train split.
        self.train_dataset = add_weights(train_dataset, True,
                                         dataset_opt.class_weight_method)
 def __init__(self, config):
     """Compose the training augmentation pipeline from *config*.

     Applies a random 180-degree rotation about each of the three axes,
     a random translation bounded by ``config.augment_translate_limit``,
     and Gaussian noise on unit edge attributes with standard deviation
     ``config.edge_attr_noise_std``.
     """
     jitter = T.RandomTranslate(config.augment_translate_limit)
     noise = UnitEdgeAttrGaussianNoise(
         mu=0, sigma=config.edge_attr_noise_std)
     axis_spins = [T.RandomRotate(180, axis=axis) for axis in range(3)]
     self.transform = T.Compose(axis_spins + [jitter, noise])
Example #3
0
    def __init__(self, dataset_opt, training_opt):
        """Create the S3DIS train/test splits (with class weights) and loaders."""
        super().__init__(dataset_opt, training_opt)
        self._data_path = os.path.join(dataset_opt.dataroot, "S3DIS")

        preprocessing = self._pre_transform

        # Training-time augmentation: subsample a fixed number of points,
        # jitter positions slightly, and rotate randomly about the z axis.
        augmentation = T.Compose([
            T.FixedPoints(dataset_opt.num_points),
            T.RandomTranslate(0.01),
            T.RandomRotate(180, axis=2),
        ])

        train_split = S3DIS_With_Weights(
            self._data_path,
            test_area=self.dataset_opt.fold,
            train=True,
            pre_transform=preprocessing,
            transform=augmentation,
            class_weight_method=dataset_opt.class_weight_method,
        )
        # The test split is only subsampled, never augmented.
        test_split = S3DIS_With_Weights(
            self._data_path,
            test_area=self.dataset_opt.fold,
            train=False,
            pre_transform=preprocessing,
            transform=T.FixedPoints(dataset_opt.num_points),
        )

        self._create_dataloaders(train_split, test_split)
    def __init__(self, dataset_opt, training_opt):
        """Set up S3DIS datasets using grid subsampling as the pre-transform."""
        super().__init__(dataset_opt, training_opt)
        self._data_path = os.path.join(dataset_opt.dataroot, "S3DIS")

        # Subsample each room on a regular grid once at preprocessing time.
        # (Second argument 13 is presumably the class count -- confirm.)
        voxelise = cT.GridSampling(dataset_opt.first_subsampling, 13)

        # Per-sample augmentation applied on the fly during training.
        augment = T.Compose([
            T.FixedPoints(dataset_opt.num_points),
            T.RandomTranslate(0.01),
            T.RandomRotate(180, axis=2),
        ])

        train_split = S3DIS_With_Weights(
            self._data_path,
            test_area=self.dataset_opt.fold,
            train=True,
            pre_transform=voxelise,
            transform=augment,
            class_weight_method=dataset_opt.class_weight_method,
        )
        # Test data is subsampled to the same point budget but not augmented.
        test_split = S3DIS_With_Weights(
            self._data_path,
            test_area=self.dataset_opt.fold,
            train=False,
            pre_transform=voxelise,
            transform=T.FixedPoints(dataset_opt.num_points),
        )

        self._create_dataloaders(train_split, test_split, validation=None)
Example #5
0
    def __init__(self,
                 root: str,
                 device: torch.device = torch.device("cpu"),
                 train: bool = True,
                 test: bool = False,
                 transform_data: bool = True):
        """Load the dataset, keeping samples 0-79 for train or 80-99 for test.

        Args:
            root: dataset root directory.
            device: target device. NOTE(review): currently unused in this
                body; kept for interface compatibility -- confirm whether
                data was meant to be moved to it.
            train: select the training portion (when ``test`` is False).
            test: select the held-out portion (when ``train`` is False).
            transform_data: when True, apply random-rotation/translation
                augmentation to every sample.
        """
        # Bug fix: ``transform_data`` was accepted but ignored -- augmentation
        # was applied unconditionally. It now gates the transform, matching
        # the companion dataset class that takes the same flag.
        if transform_data:
            transform = transforms.Compose([
                transforms.RandomRotate(36, axis=1),
                transforms.RandomTranslate(0.005)
            ])
        else:
            transform = None

        super().__init__(root=root, transform=transform)

        self.data, self.slices = torch.load(self.processed_paths[0])

        # Fixed 80/20 split by index. If both (or neither) of train/test are
        # requested, the full loaded dataset is kept unchanged.
        if train and not test:
            self.data, self.slices = self.collate(
                [self.get(i) for i in range(0, 80)])
        elif not train and test:
            self.data, self.slices = self.collate(
                [self.get(i) for i in range(80, 100)])
        # (Debug print of self.data removed.)
        self.class_ids = [int(c) for c in self.data.y]
Example #6
0
    def __init__(self,
                 root='data/ShapeNet',
                 train=True,
                 categories=None,
                 include_normals=True,
                 split='trainval',
                 transform=None,
                 pre_transform=None,
                 pre_filter=None,
                 repeat_to=None):  # Modified here to add repeat_to
        """ShapeNet dataset wrapper with hard-wired defaults and ``repeat_to``.

        NOTE(review): several caller-supplied arguments (``pre_transform``,
        ``pre_filter``, ``include_normals``, ``transform``, ``split``) are
        unconditionally overwritten below, so passing them has no effect --
        confirm this is intentional.
        """
        if categories is None:
            categories = list(self.category_ids.keys())
        if isinstance(categories, str):
            categories = [categories]
        assert all(category in self.category_ids for category in categories)
        self.categories = categories

        # Default settings -- these clobber whatever the caller passed in.
        pre_transform = T.NormalizeScale()
        pre_filter = None
        include_normals = True

        # Training uses jitter + small random rotations about each axis on
        # the combined train+val split; evaluation uses the raw test split.
        if train:
            transform = T.Compose([
                T.RandomTranslate(0.01),
                T.RandomRotate(15, axis=0),
                T.RandomRotate(15, axis=1),
                T.RandomRotate(15, axis=2)
            ])
            split = 'trainval'
        else:
            transform = None
            split = 'test'

        # NOTE(review): despite the trailing comment, ``repeat_to`` is not
        # actually forwarded to the base class here.
        super().__init__(root, transform, pre_transform,
                         pre_filter)  # Modified here to add repeat_to

        # Map the selected split to its processed file.
        if split == 'train':
            path = self.processed_paths[0]
        elif split == 'val':
            path = self.processed_paths[1]
        elif split == 'test':
            path = self.processed_paths[2]
        elif split == 'trainval':
            path = self.processed_paths[3]
        else:
            raise ValueError((f'Split {split} found, but expected either '
                              'train, val, trainval or test'))

        self.data, self.slices = torch.load(path)
        self.data.x = self.data.x if include_normals else None

        # Boolean mask: y_mask[category, part_label] is True when that part
        # label belongs to that category (50 part labels overall).
        self.y_mask = torch.zeros((len(self.seg_classes.keys()), 50),
                                  dtype=torch.bool)
        for i, labels in enumerate(self.seg_classes.values()):
            self.y_mask[i, labels] = 1

        self.repeat_to = repeat_to  # Modified here to add repeat_to
Example #7
0
 def __init__(self, root, category, **kwargs):
     """Configure a single-category ShapeNet dataset rooted at ``root``.

     Stores scale-normalisation as the pre-transform, a jitter-plus-rotation
     pipeline as the training transform, and reads labels from ``data.y``.
     """
     self.root = os.path.join(root, 'ShapeNet')
     self.category = category
     self.categories = [category]
     self.pre_transform = T.NormalizeScale()
     small_spins = [T.RandomRotate(15, axis=axis) for axis in (0, 1, 2)]
     self.train_transform = T.Compose([T.RandomTranslate(0.01)] + small_spins)
     self.label_parser = lambda data: data.y
Example #8
0
    def test_data_transforms(self):
        """
    export LD_LIBRARY_PATH=/usr/local/cuda-10.0/lib64:/usr/local/cudnn-10.0-v7.6.5.32
    proxychains python -c "from template_lib.examples.DGL.geometric.test_pytorch_geometric import TestingGeometric;\
      TestingGeometric().test_data_transforms()"

    """
        # Provide sane defaults for env vars the test harness expects.
        if 'CUDA_VISIBLE_DEVICES' not in os.environ:
            os.environ['CUDA_VISIBLE_DEVICES'] = '0'
        if 'PORT' not in os.environ:
            os.environ['PORT'] = '6006'
        if 'TIME_STR' not in os.environ:
            os.environ['TIME_STR'] = '0' if utils.is_debugging() else '1'
        # Derive the output directory from the test method / class names:
        # 'test_data_transforms' -> 'data_transforms', 'TestingX' -> 'X'.
        assert sys._getframe().f_code.co_name.startswith('test_')
        command = sys._getframe().f_code.co_name[5:]
        class_name = self.__class__.__name__[7:] \
          if self.__class__.__name__.startswith('Testing') \
          else self.__class__.__name__
        outdir = f'results/{class_name}/{command}'

        from datetime import datetime
        # Optionally suffix the output dir with a millisecond timestamp.
        TIME_STR = bool(int(os.getenv('TIME_STR', 0)))
        time_str = datetime.now().strftime("%Y%m%d-%H_%M_%S_%f")[:-3]
        outdir = outdir if not TIME_STR else (outdir + '_' + time_str)
        print(outdir)

        import collections, shutil
        # Start from a clean output directory on every run.
        shutil.rmtree(outdir, ignore_errors=True)
        os.makedirs(outdir, exist_ok=True)

        import torch_geometric.transforms as T
        from torch_geometric.datasets import ShapeNet
        from template_lib.d2.data.build_points_toy import plot_points

        # Raw point cloud: plot positions and features of the last sample.
        dataset = ShapeNet(root='datasets/shapenet', categories=['Airplane'])
        idx = -1
        plot_points(dataset[idx].pos)
        plot_points(dataset[idx].x)

        # Same data with a k-NN graph (k=6) built once at preprocessing time.
        dataset = ShapeNet(root='datasets/shapenet',
                           categories=['Airplane'],
                           pre_transform=T.KNNGraph(k=6))

        # k-NN graph plus random translation noise applied on every access.
        dataset = ShapeNet(root='datasets/shapenet',
                           categories=['Airplane'],
                           pre_transform=T.KNNGraph(k=6),
                           transform=T.RandomTranslate(0.01))
        pass
Example #9
0
 def __init__(self, root, classification=False, **kwargs):
     """Configure ShapeNet paths, transforms, and label parsing.

     When ``classification`` is True labels come from ``data.cid``
     (category id); otherwise from ``data.y`` (per-point labels).
     """
     self.root = os.path.join(root, 'ShapeNet')
     self.pre_transform = T.NormalizeScale()
     # Train: sample 2048 points, jitter, then small rotations on each axis.
     pipeline = [RandomSamplePoints(2048), T.RandomTranslate(0.01)]
     pipeline.extend(T.RandomRotate(15, axis=axis) for axis in range(3))
     self.train_transform = T.Compose(pipeline)
     # Validation only subsamples; no augmentation.
     self.val_transform = RandomSamplePoints(2048)
     self.num_classes = len(self.categories)
     self.label_parser = (
         (lambda data: data.cid) if classification else (lambda data: data.y)
     )
Example #10
0
    def __init__(self, dataset_opt, training_opt):
        """Build ShapeNet train/test datasets for one category and wire loaders."""
        super().__init__(dataset_opt, training_opt)
        self._data_path = os.path.join(dataset_opt.dataroot, 'ShapeNet')
        self._category = dataset_opt.shapenet.category
        # Jitter plus small random rotations about every axis (train only).
        augmentation = T.Compose(
            [T.RandomTranslate(0.01)]
            + [T.RandomRotate(15, axis=axis) for axis in range(3)]
        )
        normalise = T.NormalizeScale()
        train_split = ShapeNet(self._data_path, self._category, train=True,
                               transform=augmentation, pre_transform=normalise)
        # Test data is normalised but never augmented.
        test_split = ShapeNet(self._data_path, self._category, train=False,
                              pre_transform=normalise)

        self.create_dataloaders(train_split, test_split, validation=None)
Example #11
0
    def __init__(self,
                 root: str,
                 device: torch.device = torch.device("cpu"),
                 train: bool = True,
                 test: bool = True,
                 transform_data: bool = True):
        """SHREC14 dataset: 400 meshes split into train/test index ranges.

        NOTE(review): with the defaults (train=True, test=True) neither
        selection branch below fires, so the full 400-mesh dataset is kept --
        confirm that is the intended default behaviour.
        """
        self.url = 'http://www.cs.cf.ac.uk/shaperetrieval/shrec14/'

        if transform_data:
            # Augment: recentre, rotate about axis 1, add positional noise,
            # jitter, then move tensors to the requested device.
            transform = transforms.Compose([
                transforms.Center(),
                Rotate(dims=[1]),
                Move(mean=[0, 0, 0], std=[0.05, 0.05, 0.05]),
                transforms.RandomTranslate(0.01),
                ToDevice(device)
            ])
        else:
            transform = ToDevice(device)

        # Every mesh is centred on its centroid at preprocessing time.
        super().__init__(root=root,
                         transform=transform,
                         pre_transform=transforms.Center())

        self.data, self.slices = torch.load(self.processed_paths[0])

        # Meshes 0-39 and 200-239 form the test set; the rest the train set.
        testset_slice = [*range(0, 40), *range(200, 240)]
        trainset_slice = [*range(40, 200), *range(240, 400)]
        if train and not test:
            self.data, self.slices = self.collate(
                [self[i] for i in trainset_slice])
        elif not train and test:
            self.data, self.slices = self.collate(
                [self[i] for i in testset_slice])
Example #12
0
import os.path as osp

import torch
import torch.nn.functional as F
from torch_scatter import scatter
from torchmetrics.functional import jaccard_index

import torch_geometric.transforms as T
from torch_geometric.datasets import ShapeNet
from torch_geometric.loader import DataLoader
from torch_geometric.nn import MLP, DynamicEdgeConv

category = 'Airplane'  # Pass in `None` to train on all categories.
path = osp.join(osp.dirname(osp.realpath(__file__)), '..', 'data', 'ShapeNet')
# Training augmentation: jitter then small random rotations about each axis.
transform = T.Compose([
    T.RandomTranslate(0.01),
    T.RandomRotate(15, axis=0),
    T.RandomRotate(15, axis=1),
    T.RandomRotate(15, axis=2)
])
# Applied once at preprocessing time to both splits.
pre_transform = T.NormalizeScale()
train_dataset = ShapeNet(path, category, split='trainval', transform=transform,
                         pre_transform=pre_transform)
# The test split is normalised but not augmented.
test_dataset = ShapeNet(path, category, split='test',
                        pre_transform=pre_transform)
train_loader = DataLoader(train_dataset, batch_size=10, shuffle=True,
                          num_workers=6)
test_loader = DataLoader(test_dataset, batch_size=10, shuffle=False,
                         num_workers=6)

from torch_geometric.datasets import ShapeNet
import torch_geometric.transforms as T

# Plain point-cloud dataset: samples have positions but no graph structure.
dataset = ShapeNet(root='/tmp/ShapeNet', categories=['Airplane'])
print(dataset[0])

# convert point cloud dataset into a graph dataset, create a nearest neighbor graph via transforms
dataset = ShapeNet(root='/tmp/ShapeNet', categories=['Airplane'], pre_transform=T.KNNGraph(k=6))
print(dataset[0])

# add random noise to each node
dataset = ShapeNet(root='/tmp/ShapeNet', categories=['Airplane'],
                   pre_transform=T.KNNGraph(k=6), transform=T.RandomTranslate(0.01))
print(dataset[0])
    return parser


if __name__ == "__main__":
    args = default_argument_parser().parse_args()
    np.random.seed(args.seed)  # reproducible runs
    num_classes = 2
    # Build the transform pipeline from the CLI flags.
    transforms = []
    if args.max_points > 0:
        transforms.append(T.FixedPoints(args.max_points))
    if args.augment:
        transforms.append(T.RandomRotate((-180, 180),
                                         axis=2))  # Rotate around z axis
        transforms.append(T.RandomFlip(0))  # Flip about x axis
        transforms.append(T.RandomFlip(1))  # Flip about y axis
        transforms.append(T.RandomTranslate(0.0001))  # Random jitter
    if args.norm:
        transforms.append(T.NormalizeScale())
    transform = T.Compose(transforms=transforms) if transforms else None
    # NOTE(review): fraction=0.001 keeps only 0.1% of the data -- looks like
    # a leftover debugging setting; confirm before training for real.
    train_dataset = EventDataset(args.dataset,
                                 "trainval",
                                 include_proton=True,
                                 task="separation",
                                 cleanliness=args.clean,
                                 pre_transform=None,
                                 transform=transform,
                                 balanced_classes=True,
                                 fraction=0.001)
    test_dataset = EventDataset(args.dataset,
                                "test",
                                include_proton=True,
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Nov 27 09:35:12 2019

@author: dipu
"""

from torch_geometric.datasets import ShapeNet

# Point cloud only: each sample has positions/features but no edges.
dataset = ShapeNet(root='/tmp/ShapeNet', categories=['Airplane'])

print(dataset[0])

import torch_geometric.transforms as T
from torch_geometric.datasets import ShapeNet

# Build a 6-nearest-neighbour graph once, at preprocessing time.
dataset = ShapeNet(root='/tmp/ShapeNet',
                   categories=['Airplane'],
                   pre_transform=T.KNNGraph(k=6))

data = dataset[0]

# Same graph, plus random translation noise applied on every access.
dataset = ShapeNet(root='/tmp/ShapeNet',
                   categories=['Airplane'],
                   pre_transform=T.KNNGraph(k=6),
                   transform=T.RandomTranslate(0.01))

data = dataset[0]
            # Scatter the existing edge attributes into a dense
            # (num_nodes * num_nodes)-row table indexed by (src, dst).
            idx = data.edge_index[0] * data.num_nodes + data.edge_index[1]
            size = list(data.edge_attr.size())
            size[0] = data.num_nodes * data.num_nodes
            edge_attr = data.edge_attr.new_zeros(size)
            edge_attr[idx] = data.edge_attr

        # Drop self-loops from the graph built above. NOTE(review):
        # `edge_index` is defined in the part of this method not shown here.
        edge_index, edge_attr = remove_self_loops(edge_index, edge_attr)
        data.edge_attr = edge_attr
        data.edge_index = edge_index

        return data


path = osp.join(osp.dirname(osp.realpath(__file__)), '..', 'data', 'OMDB')
# Target transform, dense completion, edge distances, then augmentation.
transform = T.Compose([MyTransform(), Complete(), T.Distance(norm=False), 
    T.RandomTranslate(0.01), T.RandomRotate(15, axis=0), T.RandomRotate(15, axis=1), T.RandomRotate(15, axis=2)])
dataset = OMDBXYZ(path,transform=transform).shuffle()

print(len(dataset))
# Keep only samples with a positive target value.
dataset = dataset[dataset.data.y > 0]
print(len(dataset))

# Normalize targets to mean = 0 and std = 1.
mean = dataset.data.y.mean(dim=0, keepdim=True)
std = dataset.data.y.std(dim=0, keepdim=True)
dataset.data.y = (dataset.data.y - mean) / std
mean, std = mean.item(), std.item()

# Split datasets.
train_dataset = dataset[:9000]
val_dataset = dataset[9000:10000]
Example #17
0
batch_size = 8
num_workers = 6
nepochs = 25

# Fix all RNG seeds for reproducibility.
manual_seed = 123
np.random.seed(manual_seed)
torch.manual_seed(manual_seed)
torch.cuda.manual_seed(manual_seed)


## Transform
rot_max_angle = 15
trans_max_distance = 0.01

# Small random rotation about each axis, then a small random translation.
RotTransform = GT.Compose([GT.RandomRotate(rot_max_angle, 0), GT.RandomRotate(rot_max_angle, 1), GT.RandomRotate(rot_max_angle, 2)])
TransTransform = GT.RandomTranslate(trans_max_distance)

# Train gets augmentation on top of scale normalisation; test only normalises.
train_transform = GT.Compose([GT.NormalizeScale(), RotTransform, TransTransform])
test_transform = GT.Compose([GT.NormalizeScale(), ])


## Dataset
train_dataset = ShapeNet2(shapenet_dataset, npoints=npoints, category=category, train=True, transform=train_transform) 
train_dataloader = DataLoader(train_dataset, batch_size=batch_size, shuffle=True, num_workers=num_workers)
num_classes = train_dataset.num_classes

test_dataset = ShapeNet2(shapenet_dataset, npoints=npoints, category=category, train=False, transform=test_transform)
test_dataloader = DataLoader(test_dataset, batch_size=batch_size, shuffle=False, num_workers=num_workers)


## Model, criterion and optimizer
Example #18
0
import torch_geometric.transforms as T
from torch_geometric.datasets import ShapeNet

# Plain point-cloud dataset (positions only, no graph edges).
dataset = ShapeNet(root='/media/lepoeme20/Data/graphs/ShapeNet',
                   categories=['Airplane'])

print(dataset[0])

# Convert the point cloud dataset into a graph dataset
# by generating nearest neighbor graphs from the point clouds via transforms:
# the k-NN graph is built once (pre_transform); the random translation is
# re-applied on every sample access (transform).
transformed_dataset = ShapeNet(root='/media/lepoeme20/Data/graphs/ShapeNet',\
    categories=['Airplane'], pre_transform=T.KNNGraph(k=6), transform=T.RandomTranslate(0.01))

print(transformed_dataset[0])