Example #1
    def __init__(self,
                 root='data/ShapeNet',
                 train=True,
                 categories=None,
                 include_normals=True,
                 split='trainval',
                 transform=None,
                 pre_transform=None,
                 pre_filter=None,
                 repeat_to=None):  # Modified here to add repeat_to
        if categories is None:
            categories = list(self.category_ids.keys())
        if isinstance(categories, str):
            categories = [categories]
        assert all(category in self.category_ids for category in categories)
        self.categories = categories

        # Default settings (note: these deliberately override the caller-supplied arguments)
        pre_transform = T.NormalizeScale()
        pre_filter = None
        include_normals = True

        if train:
            transform = T.Compose([
                T.RandomTranslate(0.01),
                T.RandomRotate(15, axis=0),
                T.RandomRotate(15, axis=1),
                T.RandomRotate(15, axis=2)
            ])
            split = 'trainval'
        else:
            transform = None
            split = 'test'

        super().__init__(root, transform, pre_transform,
                         pre_filter)  # repeat_to is stored below, not passed to the base class

        if split == 'train':
            path = self.processed_paths[0]
        elif split == 'val':
            path = self.processed_paths[1]
        elif split == 'test':
            path = self.processed_paths[2]
        elif split == 'trainval':
            path = self.processed_paths[3]
        else:
            raise ValueError(f'Split {split} found, but expected either '
                             'train, val, trainval or test')

        self.data, self.slices = torch.load(path)
        self.data.x = self.data.x if include_normals else None

        # Boolean mask of shape [num_categories, 50]: marks which of the 50
        # part labels are valid for each shape category.
        self.y_mask = torch.zeros((len(self.seg_classes.keys()), 50),
                                  dtype=torch.bool)
        for i, labels in enumerate(self.seg_classes.values()):
            self.y_mask[i, labels] = 1

        self.repeat_to = repeat_to  # Modified here to add repeat_to
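
The constructor stores `repeat_to` but the snippet does not show how it is consumed. A common use for such a parameter is to inflate the dataset's reported length so a small dataset is revisited several times per epoch; below is a minimal standalone sketch of that pattern (a generic wrapper illustrating the assumed behaviour, not the original class's implementation):

from torch.utils.data import Dataset

class RepeatTo(Dataset):
    """Cycles a dataset's samples so it reports `repeat_to` items per epoch."""
    def __init__(self, dataset, repeat_to=None):
        self.dataset = dataset
        self.repeat_to = repeat_to

    def __len__(self):
        # Report the inflated length when repeat_to is set.
        return self.repeat_to or len(self.dataset)

    def __getitem__(self, idx):
        # Wrap indices back onto the real samples.
        return self.dataset[idx % len(self.dataset)]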
Example #2
def transform_setup(graph_u=False,
                    graph_gcn=False,
                    rotation=(180, 2),  # (degrees, axis); indexed below, so a bare int would fail
                    samplePoints=1024,
                    mesh=False,
                    node_translation=0.01):
    if not graph_u and not graph_gcn:
        # Default transformation for scale normalization, centering, point sampling and rotation
        pretransform = T.Compose([T.NormalizeScale(), T.Center()])
        transform = T.Compose([
            T.SamplePoints(samplePoints),
            T.RandomRotate(rotation[0], rotation[1])
        ])
        print("pointnet rotation {}".format(rotation))
    elif graph_u:
        pretransform = T.Compose([T.NormalizeScale(), T.Center()])
        transform = T.Compose([
            T.NormalizeScale(),
            T.Center(),
            T.SamplePoints(samplePoints, True, True),
            T.RandomRotate(rotation[0], rotation[1]),
            T.KNNGraph(k=graph_u)
        ])
    elif graph_gcn:

        pretransform = T.Compose([T.NormalizeScale(), T.Center()])

        if mesh:
            # NOTE: both branches below currently build identical transforms.
            if mesh == "extraFeatures":
                transform = T.Compose([
                    T.RandomRotate(rotation[0], rotation[1]),
                    T.GenerateMeshNormals(),
                    T.FaceToEdge(True),
                    T.Distance(norm=True),
                    T.TargetIndegree(cat=True)
                ])
            else:
                transform = T.Compose([
                    T.RandomRotate(rotation[0], rotation[1]),
                    T.GenerateMeshNormals(),
                    T.FaceToEdge(True),
                    T.Distance(norm=True),
                    T.TargetIndegree(cat=True)
                ])
        else:
            transform = T.Compose([
                T.SamplePoints(samplePoints, True, True),
                T.KNNGraph(k=graph_gcn),
                T.Distance(norm=True)
            ])
            print("no mesh")
        print("Rotation {}".format(rotation))
        print("Meshing {}".format(mesh))

    else:
        # Unreachable: the branches above already cover every combination.
        print('no transform')

    return transform, pretransform
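
A hypothetical call site for `transform_setup` (ModelNet is only an illustrative choice here; any PyG dataset accepting `transform`/`pre_transform` works the same way):

from torch_geometric.datasets import ModelNet

# graph_gcn=6 selects the k-NN branch; rotation is (degrees, axis).
transform, pretransform = transform_setup(graph_gcn=6, rotation=(180, 2))
dataset = ModelNet(root='data/ModelNet10', name='10', train=True,
                   transform=transform, pre_transform=pretransform)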
Example #3
    def __init__(self, root, category, **kwargs):
        self.root = os.path.join(root, 'ShapeNet')
        self.category = category
        self.categories = [category]
        self.pre_transform = T.NormalizeScale()
        self.train_transform = T.Compose([
            T.RandomTranslate(0.01),
            T.RandomRotate(15, axis=0),
            T.RandomRotate(15, axis=1),
            T.RandomRotate(15, axis=2),
        ])
        self.label_parser = lambda data: data.y
Example #4
    def __init__(self, dataset_opt):
        super().__init__(dataset_opt)

        pre_transform = self.pre_transform

        # NOTE: this local `transform` is never used below; the datasets are
        # built with `self.train_transform` / `self.test_transform` instead.
        transform = T.Compose([
            T.FixedPoints(dataset_opt.num_points),
            T.RandomTranslate(0.01),
            T.RandomRotate(180, axis=2),
        ])

        train_dataset = S3DIS1x1(
            self._data_path,
            test_area=self.dataset_opt.fold,
            train=True,
            pre_transform=self.pre_transform,
            transform=self.train_transform,
        )
        self.test_dataset = S3DIS1x1(
            self._data_path,
            test_area=self.dataset_opt.fold,
            train=False,
            pre_transform=pre_transform,
            transform=self.test_transform,
        )

        self.train_dataset = add_weights(train_dataset, True,
                                         dataset_opt.class_weight_method)
Example #5
    def __init__(self, dataset_opt, training_opt):
        super().__init__(dataset_opt, training_opt)
        self._data_path = os.path.join(dataset_opt.dataroot, "S3DIS")

        pre_transform = self._pre_transform

        transform = T.Compose([
            T.FixedPoints(dataset_opt.num_points),
            T.RandomTranslate(0.01),
            T.RandomRotate(180, axis=2),
        ])

        train_dataset = S3DIS_With_Weights(
            self._data_path,
            test_area=self.dataset_opt.fold,
            train=True,
            pre_transform=pre_transform,
            transform=transform,
            class_weight_method=dataset_opt.class_weight_method,
        )
        test_dataset = S3DIS_With_Weights(
            self._data_path,
            test_area=self.dataset_opt.fold,
            train=False,
            pre_transform=pre_transform,
            transform=T.FixedPoints(dataset_opt.num_points),
        )

        self._create_dataloaders(train_dataset, test_dataset)
Example #6
    def __init__(self, root, classification=False, **kwargs):
        self.root = os.path.join(root, 'ShapeNet')
        self.pre_transform = T.NormalizeScale()
        self.train_transform = T.Compose([
            RandomSamplePoints(2048),
            T.RandomTranslate(0.01),
            T.RandomRotate(15, axis=0),
            T.RandomRotate(15, axis=1),
            T.RandomRotate(15, axis=2),
        ])
        self.val_transform = RandomSamplePoints(2048)
        self.num_classes = len(self.categories)
        if classification:
            self.label_parser = lambda data: data.cid
        else:
            self.label_parser = lambda data: data.y
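
To make the two label modes concrete, a small self-contained sketch (`data.y` holds per-point part labels in ShapeNet; `cid` is this codebase's own attribute, assumed here to hold a per-shape category id):

import torch
from torch_geometric.data import Data

# Hypothetical Data object mimicking one ShapeNet sample.
data = Data(pos=torch.rand(2048, 3), y=torch.randint(0, 50, (2048,)))
data.cid = torch.tensor([3])  # assumed per-shape category id

seg_parser = lambda d: d.y    # segmentation: one label per point
cls_parser = lambda d: d.cid  # classification: one label per shape
print(seg_parser(data).shape, cls_parser(data))  # torch.Size([2048]) tensor([3])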
Example #7
    def __init__(self, config):
        rotations = [T.RandomRotate(180, axis=i) for i in range(3)]
        translation = T.RandomTranslate(config.augment_translate_limit)
        merge_score_noise = UnitEdgeAttrGaussianNoise(
            mu=0, sigma=config.edge_attr_noise_std)
        self.transform = T.Compose(
            [*rotations, translation, merge_score_noise])
Example #8
    def __init__(self, dataset_opt, training_opt):
        super().__init__(dataset_opt, training_opt)
        self._data_path = os.path.join(dataset_opt.dataroot, "S3DIS")

        pre_transform = cT.GridSampling(dataset_opt.first_subsampling, 13)
        # Select only 2^15 points from the room
        # pre_transform = T.FixedPoints(dataset_opt.room_points)

        transform = T.Compose([
            T.FixedPoints(dataset_opt.num_points),
            T.RandomTranslate(0.01),
            T.RandomRotate(180, axis=2),
        ])

        train_dataset = S3DIS_With_Weights(
            self._data_path,
            test_area=self.dataset_opt.fold,
            train=True,
            pre_transform=pre_transform,
            transform=transform,
            class_weight_method=dataset_opt.class_weight_method,
        )
        test_dataset = S3DIS_With_Weights(
            self._data_path,
            test_area=self.dataset_opt.fold,
            train=False,
            pre_transform=pre_transform,
            transform=T.FixedPoints(dataset_opt.num_points),
        )

        self._create_dataloaders(train_dataset, test_dataset, validation=None)
Example #9
    def __init__(self,
                 root: str,
                 device: torch.device = torch.device("cpu"),
                 train: bool = True,
                 test: bool = False,
                 transform_data: bool = True):

        transform = transforms.Compose([
            transforms.RandomRotate(36, axis=1),
            transforms.RandomTranslate(0.005)
        ])

        super().__init__(root=root, transform=transform)

        self.data, self.slices = torch.load(self.processed_paths[0])

        if train and not test:
            self.data, self.slices = self.collate(
                [self.get(i) for i in range(0, 80)])
        elif not train and test:
            self.data, self.slices = self.collate(
                [self.get(i) for i in range(80, 100)])
        print(self.data)
        self.class_ids = [int(c) for c in self.data.y]
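
As a side note, the manual collate above can also be expressed with PyG's dataset slicing, which returns a lazy index view instead of re-collating tensors; a sketch using a small stock dataset as a stand-in:

from torch_geometric.datasets import GeometricShapes

dataset = GeometricShapes(root='data/GeometricShapes')  # 40 small meshes
train_view = dataset[:30]   # index view over the first 30 samples
test_view = dataset[30:40]  # index view over the rest
print(len(train_view), len(test_view))  # 30 10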
Example #10
    def __init__(self, dataset_opt, training_opt):
        super().__init__(dataset_opt, training_opt)
        self._data_path = os.path.join(dataset_opt.dataroot, 'ShapeNet')
        self._category = dataset_opt.shapenet.category
        transform = T.Compose([
            T.RandomTranslate(0.01),
            T.RandomRotate(15, axis=0),
            T.RandomRotate(15, axis=1),
            T.RandomRotate(15, axis=2)
        ])
        pre_transform = T.NormalizeScale()
        train_dataset = ShapeNet(self._data_path, self._category, train=True,
                                 transform=transform, pre_transform=pre_transform)
        test_dataset = ShapeNet(self._data_path, self._category, train=False,
                                pre_transform=pre_transform)

        self.create_dataloaders(train_dataset, test_dataset, validation=None)
Example #11
import os.path as osp

import torch
import torch.nn.functional as F
from torch_scatter import scatter
from torchmetrics.functional import jaccard_index

import torch_geometric.transforms as T
from torch_geometric.datasets import ShapeNet
from torch_geometric.loader import DataLoader
from torch_geometric.nn import MLP, DynamicEdgeConv

category = 'Airplane'  # Pass in `None` to train on all categories.
path = osp.join(osp.dirname(osp.realpath(__file__)), '..', 'data', 'ShapeNet')
transform = T.Compose([
    T.RandomTranslate(0.01),
    T.RandomRotate(15, axis=0),
    T.RandomRotate(15, axis=1),
    T.RandomRotate(15, axis=2)
])
pre_transform = T.NormalizeScale()
train_dataset = ShapeNet(path, category, split='trainval', transform=transform,
                         pre_transform=pre_transform)
test_dataset = ShapeNet(path, category, split='test',
                        pre_transform=pre_transform)
train_loader = DataLoader(train_dataset, batch_size=10, shuffle=True,
                          num_workers=6)
test_loader = DataLoader(test_dataset, batch_size=10, shuffle=False,
                         num_workers=6)


class Net(torch.nn.Module):
Example #12
# ## Data loading
# Let's get the dataset

import torch
from torch_geometric.datasets import ModelNet
import torch_geometric.transforms as T
import time
from tqdm import tqdm_notebook

pre_transform = T.NormalizeScale()
transform = T.Compose([
    T.SamplePoints(1024),
    T.RandomRotate(30),
    T.RandomScale((0.5, 2)),
])
name = '40'

train_ds = ModelNet(root='./',
                    train=True,
                    name=name,
                    pre_transform=pre_transform,
                    transform=transform)

test_ds = ModelNet(root='./',
                   train=False,  # evaluation split
                   name=name,
                   pre_transform=pre_transform,
                   transform=T.SamplePoints(1024 * 4))
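
For completeness, a typical way to batch these datasets with PyG's loader (the batch size is an arbitrary choice):

from torch_geometric.loader import DataLoader

train_loader = DataLoader(train_ds, batch_size=32, shuffle=True)
test_loader = DataLoader(test_ds, batch_size=32, shuffle=False)

batch = next(iter(train_loader))
print(batch.pos.shape, batch.y.shape)  # sampled points and class labels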
Example #13
        default=50,
        help="number of minibatches between logging",
    )

    return parser


if __name__ == "__main__":
    args = default_argument_parser().parse_args()
    np.random.seed(args.seed)
    num_classes = 2
    transforms = []
    if args.max_points > 0:
        transforms.append(T.FixedPoints(args.max_points))
    if args.augment:
        transforms.append(T.RandomRotate((-180, 180),
                                         axis=2))  # Rotate around z axis
        transforms.append(T.RandomFlip(0))  # Flip about x axis
        transforms.append(T.RandomFlip(1))  # Flip about y axis
        transforms.append(T.RandomTranslate(0.0001))  # Random jitter
    if args.norm:
        transforms.append(T.NormalizeScale())
    transform = T.Compose(transforms=transforms) if transforms else None
    train_dataset = EventDataset(args.dataset,
                                 "trainval",
                                 include_proton=True,
                                 task="separation",
                                 cleanliness=args.clean,
                                 pre_transform=None,
                                 transform=transform,
                                 balanced_classes=True,
                                 fraction=0.001)
Example #14
#from torch_geometric.transforms import GridSampling
import torch_geometric.transforms as T
from human_corres.data import ImgData, ImgBatch
from human_corres.config import PATH_TO_SURREAL
import human_corres.transforms as H

num_views = 20
IDlist = np.stack([
    np.arange(100000 * num_views),
    np.arange(115000 * num_views, 215000 * num_views)
], axis=0)
num_test = 5000
DefaultTransform = T.Compose([
    T.Center(),
    T.RandomRotate(30, axis=0),
    T.RandomRotate(30, axis=1),
    T.RandomRotate(30, axis=2),
])


class SurrealFEPts5k(Dataset):
    """Surreal 3D points for Feature Extraction (FE).

    Samples a fixed number of points.

    Output: dictionary with keys {points3d, correspondence}
    Data Format:
        points3d: [num_points, 3] real numbers.
        correspondence: [num_points] integers in range [6890].
    """
    def __init__(self,
Example #15
    logging.basicConfig(filename=os.path.join(log_dir, 'n_mnist.log'),
                        level=logging.DEBUG)
    model_file = 'n_mnist.pkl'

    device = torch.device("cuda:" +
                          args.cuda if torch.cuda.is_available() else "cpu")

    model = model.to(device)
    optimizer = torch.optim.Adam(model.parameters(), lr=0.001)

    pre_transform = T.Compose([T.Cartesian(cat=False)])
    # train_data_aug = T.Compose([T.RandomScale([0.95, 1])])
    train_data_aug = T.Compose([
        T.RandomScale([0.95, 1]),
        T.RandomRotate((0, 10), axis=0),
        T.RandomFlip(axis=0, p=0.5)
    ])

    train_dataset = Graph_2D_Memory_Dataset(train_dir,
                                            transform=train_data_aug,
                                            pre_transform=pre_transform)
    train_loader = DataLoader(train_dataset,
                              batch_size=args.batch_size,
                              shuffle=True,
                              num_workers=args.workers)

    # train
    print("train")
    for epoch in range(1, args.epoch):
        model.train()
Example #16
            idx = data.edge_index[0] * data.num_nodes + data.edge_index[1]
            size = list(data.edge_attr.size())
            size[0] = data.num_nodes * data.num_nodes
            edge_attr = data.edge_attr.new_zeros(size)
            edge_attr[idx] = data.edge_attr

        edge_index, edge_attr = remove_self_loops(edge_index, edge_attr)
        data.edge_attr = edge_attr
        data.edge_index = edge_index

        return data


path = osp.join(osp.dirname(osp.realpath(__file__)), '..', 'data', 'OMDB')
transform = T.Compose([
    MyTransform(), Complete(), T.Distance(norm=False),
    T.RandomTranslate(0.01), T.RandomRotate(15, axis=0),
    T.RandomRotate(15, axis=1), T.RandomRotate(15, axis=2)
])
dataset = OMDBXYZ(path, transform=transform).shuffle()

print(len(dataset))
dataset = dataset[dataset.data.y > 0]
print(len(dataset))

# Normalize targets to mean = 0 and std = 1.
mean = dataset.data.y.mean(dim=0, keepdim=True)
std = dataset.data.y.std(dim=0, keepdim=True)
dataset.data.y = (dataset.data.y - mean) / std
mean, std = mean.item(), std.item()

# Split datasets.
train_dataset = dataset[:9000]
val_dataset = dataset[9000:10000]
Example #17
npoints = 2500  # Sample to fixed number of points when npoints > 0
batch_size = 8
num_workers = 6
nepochs = 25

manual_seed = 123
np.random.seed(manual_seed)
torch.manual_seed(manual_seed)
torch.cuda.manual_seed(manual_seed)


## Transform
rot_max_angle = 15
trans_max_distance = 0.01

RotTransform = GT.Compose([
    GT.RandomRotate(rot_max_angle, 0),
    GT.RandomRotate(rot_max_angle, 1),
    GT.RandomRotate(rot_max_angle, 2)
])
TransTransform = GT.RandomTranslate(trans_max_distance)

train_transform = GT.Compose([GT.NormalizeScale(), RotTransform, TransTransform])
test_transform = GT.Compose([GT.NormalizeScale()])


## Dataset
train_dataset = ShapeNet2(shapenet_dataset, npoints=npoints, category=category, train=True, transform=train_transform) 
train_dataloader = DataLoader(train_dataset, batch_size=batch_size, shuffle=True, num_workers=num_workers)
num_classes = train_dataset.num_classes

test_dataset = ShapeNet2(shapenet_dataset, npoints=npoints, category=category, train=False, transform=test_transform)
test_dataloader = DataLoader(test_dataset, batch_size=batch_size, shuffle=False, num_workers=num_workers)

Example #18
train_loader = DataLoader(train_dataset, batch_size=args.batch_size, shuffle=True)
test_loader = DataLoader(test_dataset, batch_size=args.batch_size, shuffle=False)  # no need to shuffle at test time

device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
model = SGCN(dim_coor=args.dim_coor,
             out_dim=args.out_dim,
             input_features=args.label_dim,
             layers_num=args.layers_num,
             model_dim=args.model_dim,
             out_channels_1=args.out_channels_1,
             dropout=args.dropout,
             use_cluster_pooling=args.use_cluster_pooling).to(device)

optimizer = torch.optim.Adam(model.parameters(), lr=args.lr)

rotation_0 = T.RandomRotate(degrees=180, axis=0)
rotation_1 = T.RandomRotate(degrees=180, axis=1)
rotation_2 = T.RandomRotate(degrees=180, axis=2)


def train(epoch):
    model.train()

    loss_all = 0
    for data in train_loader:
        data = data.to(device)
        optimizer.zero_grad()
        if args.train_augmentation:
            data = rotation_0(data)
            data = rotation_1(data)
            data = rotation_2(data)
Example #19
npoints = 2500  # Sample to fixed number of points when npoints > 0
batch_size = 8
num_workers = 6
nepochs = 25

manual_seed = 123
np.random.seed(manual_seed)
torch.manual_seed(manual_seed)
torch.cuda.manual_seed(manual_seed)

# Transform
rot_max_angle = 15
trans_max_distance = 0.01

RotTransform = GT.Compose([
    GT.RandomRotate(rot_max_angle, 0),
    GT.RandomRotate(rot_max_angle, 1),
    GT.RandomRotate(rot_max_angle, 2)
])
TransTransform = GT.RandomTranslate(trans_max_distance)

train_transform = GT.Compose(
    [GT.NormalizeScale(), RotTransform, TransTransform])
test_transform = GT.Compose([
    GT.NormalizeScale(),
])

# Dataset
train_dataset = ShapeNet2(shapenet_dataset,
                          npoints=npoints,
                          category=category,
Example #20
def data(data_folder,
         files_ending,
         data_type,
         target_class,
         task,
         REPROCESS,
         local_features,
         global_features,
         indices,
         batch_size,
         num_workers=2,
         data_compression=None,
         data_nativeness=None,
         hemisphere=None):
    '''
    Get data loaders and data sets

    :param data_folder:
    :param files_ending:
    :param data_type:
    :param target_class:
    :param task:
    :param REPROCESS:
    :param local_features:
    :param global_features:
    :param indices:
    :param batch_size:
    :param num_workers:
    :param data_compression:
    :param data_nativeness:
    :param hemisphere:
    :return:
    '''

    path = osp.join(
        osp.dirname(osp.realpath(__file__)), '..',
        f'data/segmentation/{data_compression}_{data_nativeness}_{hemisphere}/{data_type}')

    # Transformations
    transform = T.Compose([
        # T.RandomTranslate(0.1),
        # T.RandomFlip(0, p=0.3),
        # T.RandomFlip(1, p=0.1),
        # T.RandomFlip(2, p=0.3),
        # T.FixedPoints(500, replace=False), #32492  16247
        T.RandomRotate(360, axis=0),
        T.RandomRotate(360, axis=1),
        T.RandomRotate(360, axis=2)
    ])

    pre_transform = T.NormalizeScale()
    print('Starting dataset processing...')
    train_dataset = OurDataset(path,
                               train=True,
                               transform=transform,
                               pre_transform=pre_transform,
                               target_class=target_class,
                               task=task,
                               reprocess=REPROCESS,
                               local_features=local_features,
                               global_feature=global_features,
                               val=False,
                               indices=indices['Train'],
                               data_folder=data_folder,
                               files_ending=files_ending)

    test_dataset = OurDataset(path,
                              train=False,
                              transform=transform,
                              pre_transform=pre_transform,
                              target_class=target_class,
                              task=task,
                              reprocess=REPROCESS,
                              local_features=local_features,
                              global_feature=global_features,
                              val=False,
                              indices=indices['Test'],
                              data_folder=data_folder,
                              files_ending=files_ending)

    validation_dataset = OurDataset(path,
                                    train=False,
                                    transform=transform,
                                    pre_transform=pre_transform,
                                    target_class=target_class,
                                    task=task,
                                    reprocess=REPROCESS,
                                    local_features=local_features,
                                    global_feature=global_features,
                                    val=True,
                                    indices=indices['Val'],
                                    data_folder=data_folder,
                                    files_ending=files_ending)

    num_labels = train_dataset.num_labels

    train_loader = DataLoader(train_dataset,
                              batch_size=batch_size,
                              shuffle=True,
                              num_workers=num_workers)
    test_loader = DataLoader(test_dataset,
                             batch_size=batch_size,
                             shuffle=False,
                             num_workers=num_workers)
    val_loader = DataLoader(validation_dataset,
                            batch_size=batch_size,
                            shuffle=False,
                            num_workers=num_workers)

    return train_dataset, test_dataset, validation_dataset, train_loader, test_loader, val_loader, num_labels
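
A hypothetical invocation, mainly to illustrate the expected structure of `indices` (every value below is a placeholder, not taken from the original project):

(train_ds, test_ds, val_ds,
 train_loader, test_loader, val_loader, num_labels) = data(
    data_folder='data/raw',          # placeholder paths and options
    files_ending='.vtk',
    data_type='surface',
    target_class='labels',
    task='segmentation',
    REPROCESS=False,
    local_features=[],
    global_features=[],
    indices={'Train': list(range(300)),
             'Test': list(range(300, 350)),
             'Val': list(range(350, 400))},
    batch_size=8,
)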