Beispiel #1
0
def get_data():
    """Build single-sample, unshuffled DataLoaders for the ShapeNet 'Bag' category.

    Returns:
        (trainLoader, testloader): torch_geometric DataLoaders over the train
        and test datasets respectively.
    """
    # NOTE(review): read but never used below — kept for interface parity.
    dataset = args.name
    path = '../data/geometric/ShapeNet-Bag'
    trainset = ShapeNet(path, "Bag")
    testset = ShapeNet(path, "Bag")
    lenTrain = len(trainset)
    lenTest = len(testset)

    print("Len Dataset:", lenTrain)
    trainLoader = DataLoader(trainset[:lenTrain], batch_size=1, shuffle=False)
    # BUG FIX: the test loader previously sliced `trainset`; it must use `testset`.
    testloader = DataLoader(testset[:lenTest], batch_size=1, shuffle=False)
    print("Len TrainLoader:", len(trainLoader))

    return trainLoader, testloader
Beispiel #2
0
    def test_data_transforms(self):
        """Smoke-test torch_geometric's ShapeNet dataset with and without transforms.

    Run standalone via:
    export LD_LIBRARY_PATH=/usr/local/cuda-10.0/lib64:/usr/local/cudnn-10.0-v7.6.5.32
    proxychains python -c "from template_lib.examples.DGL.geometric.test_pytorch_geometric import TestingGeometric;\
      TestingGeometric().test_data_transforms()"

    """
        # Provide defaults for the env vars the surrounding tooling expects.
        if 'CUDA_VISIBLE_DEVICES' not in os.environ:
            os.environ['CUDA_VISIBLE_DEVICES'] = '0'
        if 'PORT' not in os.environ:
            os.environ['PORT'] = '6006'
        if 'TIME_STR' not in os.environ:
            os.environ['TIME_STR'] = '0' if utils.is_debugging() else '1'
        # func name
        # Derive the command name from this test method's name ('test_' prefix dropped).
        assert sys._getframe().f_code.co_name.startswith('test_')
        command = sys._getframe().f_code.co_name[5:]
        # Strip the leading 'Testing' from the class name when present.
        class_name = self.__class__.__name__[7:] \
          if self.__class__.__name__.startswith('Testing') \
          else self.__class__.__name__
        outdir = f'results/{class_name}/{command}'

        from datetime import datetime
        # Optionally suffix the output dir with a millisecond-resolution timestamp.
        TIME_STR = bool(int(os.getenv('TIME_STR', 0)))
        time_str = datetime.now().strftime("%Y%m%d-%H_%M_%S_%f")[:-3]
        outdir = outdir if not TIME_STR else (outdir + '_' + time_str)
        print(outdir)

        import collections, shutil
        # Start from a clean output directory.
        shutil.rmtree(outdir, ignore_errors=True)
        os.makedirs(outdir, exist_ok=True)

        import torch_geometric.transforms as T
        from torch_geometric.datasets import ShapeNet
        from template_lib.d2.data.build_points_toy import plot_points

        # Raw point clouds: plot positions and per-point features of the last sample.
        dataset = ShapeNet(root='datasets/shapenet', categories=['Airplane'])
        idx = -1
        plot_points(dataset[idx].pos)
        plot_points(dataset[idx].x)

        # Same dataset converted to a graph via a k-NN pre_transform (cached on disk).
        dataset = ShapeNet(root='datasets/shapenet',
                           categories=['Airplane'],
                           pre_transform=T.KNNGraph(k=6))

        # Additionally jittered at access time via a random translation.
        dataset = ShapeNet(root='datasets/shapenet',
                           categories=['Airplane'],
                           pre_transform=T.KNNGraph(k=6),
                           transform=T.RandomTranslate(0.01))
        pass
def load_dataset(path,
                 transform=None,
                 pre_transform=None,
                 pre_filter=None,
                 category=None,
                 name='10',
                 test_area=6):
    """Load train/test splits for ShapeNet, ModelNet or S3DIS.

    The dataset is selected by ``path.name`` (``path`` is a pathlib.Path-like
    object whose final component names the dataset).

    Args:
        path: dataset root; ``path.name`` must be 'ShapeNet', 'ModelNet' or 'S3DIS'.
        transform, pre_transform, pre_filter: passed through to the dataset.
        category: ShapeNet category (ShapeNet only).
        name: ModelNet variant, e.g. '10' or '40' (ModelNet only).
        test_area: held-out area index (S3DIS only).

    Returns:
        (train_dataset, test_dataset)

    Raises:
        ValueError: if ``path.name`` is not a recognized dataset name.
    """
    if path.name == 'ShapeNet':
        train_dataset = ShapeNet(path,
                                 category,
                                 split='trainval',
                                 transform=transform,
                                 pre_transform=pre_transform,
                                 pre_filter=pre_filter)
        test_dataset = ShapeNet(path,
                                category,
                                split='test',
                                transform=transform,
                                pre_transform=pre_transform,
                                pre_filter=pre_filter)
    elif path.name == 'ModelNet':
        train_dataset = ModelNet(path,
                                 name=name,
                                 train=True,
                                 transform=transform,
                                 pre_transform=pre_transform,
                                 pre_filter=pre_filter)
        test_dataset = ModelNet(path,
                                name=name,
                                train=False,
                                transform=transform,
                                pre_transform=pre_transform,
                                pre_filter=pre_filter)
    elif path.name == 'S3DIS':
        train_dataset = S3DIS(path,
                              test_area=test_area,
                              train=True,
                              transform=transform,
                              pre_transform=pre_transform,
                              pre_filter=pre_filter)
        test_dataset = S3DIS(path,
                             test_area=test_area,
                             train=False,
                             transform=transform,
                             pre_transform=pre_transform,
                             pre_filter=pre_filter)
    else:
        # Previously an unknown name fell through to an UnboundLocalError
        # on the return below; fail with a clear message instead.
        raise ValueError(f"Unknown dataset: {path.name!r}")

    return train_dataset, test_dataset
Beispiel #4
0
def data_transforms():
    """Demonstrate ShapeNet transforms: raw points, k-NN graph, random jitter.

    Loads the Airplane category three times and prints the first sample of
    each variant. Side effect: downloads/caches the dataset under ../data.
    """
    dataset = 'ShapeNet'
    path = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..',
                        'data', dataset)
    # Raw point clouds.
    dataset = ShapeNet(root=path, categories=['Airplane'])
    print(dataset[0])
    # Converted to a graph once, before caching, via a k-NN pre_transform.
    dataset = ShapeNet(root=path,
                       categories=['Airplane'],
                       pre_transform=T.KNNGraph(k=6))
    print(dataset[0])
    # BUG FIX: this call previously passed root=dataset (the ShapeNet object
    # bound above), which is not a filesystem path; use the same root.
    dataset = ShapeNet(root=path,
                       categories=['Airplane'],
                       pre_transform=T.KNNGraph(k=6),
                       transform=T.RandomTranslate(0.01))
    print(dataset[0])
Beispiel #5
0
    def __init__(self, root_dir, category, train=True, transform=None, npoints=2500):
        """Wrap one ShapeNet category as a point-cloud dataset of `npoints` points."""
        valid_categories = (
            'Airplane', 'Bag', 'Cap', 'Car', 'Chair', 'Earphone', 'Guitar',
            'Knife', 'Lamp', 'Laptop', 'Motorbike', 'Mug', 'Pistol',
            'Rocket', 'Skateboard', 'Table',
        )
        assert category in valid_categories

        self.npoints = npoints
        self.dataset = ShapeNet(root_dir, category, train, transform)
Beispiel #6
0
    def __init__(self, dataset_opt, training_opt):
        """Build augmented ShapeNet train/test splits and their dataloaders."""
        super().__init__(dataset_opt, training_opt)
        self._data_path = os.path.join(dataset_opt.dataroot, 'ShapeNet')
        self._category = dataset_opt.shapenet.category

        # Access-time augmentation: small jitter plus 15-degree random
        # rotations around each of the three axes.
        augmentations = [T.RandomTranslate(0.01)]
        for axis in (0, 1, 2):
            augmentations.append(T.RandomRotate(15, axis=axis))
        transform = T.Compose(augmentations)
        # Applied once before the processed dataset is cached on disk.
        pre_transform = T.NormalizeScale()

        train_dataset = ShapeNet(self._data_path, self._category, train=True,
                                 transform=transform, pre_transform=pre_transform)
        # The test split is deliberately left un-augmented.
        test_dataset = ShapeNet(self._data_path, self._category, train=False,
                                pre_transform=pre_transform)

        self.create_dataloaders(train_dataset, test_dataset, validation=None)
Beispiel #7
0
def load_dataset(args):
    """Build train/test DataLoaders for the dataset named by ``args.dataset``.

    Args:
        args: namespace providing ``dataset`` ('shapenet', 'modelnet' or
            'completion3D'), ``bsize`` (batch size), ``num_pts`` and — for
            shapenet/completion3D — a comma-separated ``categories`` string.

    Returns:
        (train_dataloader, test_dataloader)

    Raises:
        ValueError: if ``args.dataset`` is not one of the supported names
            (previously this fell through to an UnboundLocalError).
    """
    # load ShapeNet dataset
    if args.dataset == 'shapenet':
        pre_transform, transform = augment_transforms(args)

        categories = args.categories.split(',')
        train_dataset = ShapeNet('../data_root/ShapeNet_normal', categories, split='trainval', include_normals=False,
                                 pre_transform=pre_transform, transform=transform)
        # Test split resamples a fixed number of points instead of augmenting.
        test_dataset = ShapeNet('../data_root/ShapeNet_normal', categories, split='test', include_normals=False,
                                pre_transform=pre_transform, transform=T.FixedPoints(args.num_pts))
        train_dataloader = DataLoader(train_dataset, batch_size=args.bsize, shuffle=True,
                                      num_workers=6, drop_last=True)
        test_dataloader = DataLoader(test_dataset, batch_size=args.bsize, shuffle=True,
                                     num_workers=6, drop_last=True)

    # load ModelNet dataset
    elif args.dataset == 'modelnet':
        pre_transform, transform = augment_transforms(args)

        train_dataset = ModelNet('../data_root/ModelNet40', name='40', train=True,
                                 pre_transform=pre_transform, transform=transform)
        test_dataset = ModelNet('../data_root/ModelNet40', name='40', train=False,
                                 pre_transform=pre_transform, transform=T.SamplePoints(args.num_pts))
        train_dataloader = DataLoader(train_dataset, batch_size=args.bsize, shuffle=True,
                                      num_workers=6, drop_last=True)
        test_dataloader = DataLoader(test_dataset, batch_size=args.bsize, shuffle=True,
                                     num_workers=6, drop_last=True)

    # load completion3D dataset
    elif args.dataset == 'completion3D':
        pre_transform, transform = augment_transforms(args)

        categories = args.categories.split(',')
        train_dataset = completion3D_class('../data_root/completion3D', categories, split='train',
                            include_normals=False, pre_transform=pre_transform, transform=transform)
        test_dataset = completion3D_class('../data_root/completion3D', categories, split='val',
                            include_normals=False, pre_transform=pre_transform, transform=transform)
        train_dataloader = DataLoader(train_dataset, batch_size=args.bsize, shuffle=True,
                                      num_workers=8, drop_last=True)
        test_dataloader = DataLoader(test_dataset, batch_size=args.bsize, shuffle=False,
                                     num_workers=8, drop_last=True)

    else:
        raise ValueError(f"Unsupported dataset: {args.dataset!r}")

    return train_dataloader, test_dataloader
Beispiel #8
0
    def __init__(self,
                 n_sweeps=2,
                 n_points=512,
                 max_distance=None,
                 flow_dim=3,
                 **kwargs):
        """Dataset of consecutive point-cloud sweeps built on top of ShapeNet.

        Args:
            n_sweeps: number of consecutive sweeps per sample.
            n_points: number of points per sweep.
            max_distance: optional distance bound (stored; not used in this method).
            flow_dim: dimensionality of the flow vectors.
        kwargs:
            occlusion (bool), partition (str: 'train'/other), overfit (bool),
            data_size (int), random_transition (bool), acceleration (bool),
            rotate (bool).
        """
        self.n_sweeps = n_sweeps
        self.n_points = n_points
        self.max_distance = max_distance
        self.flow_dim = flow_dim
        self.occlusion = kwargs.get("occlusion", True)
        self.partition = kwargs.get("partition", "train")
        self.overfit = kwargs.get("overfit", False)
        # Fall back to the default size for missing or invalid values.
        self.data_size = kwargs.get("data_size", 4242)
        if (self.data_size is None) or (self.data_size < 1):
            self.data_size = 4242
        # NOTE(review): 4242/242 look like fixed epoch lengths — confirm intent.
        self.dataset_len = 4242 if self.partition == "train" else 242
        self.max_objects = 10

        self.odds = 2 if self.partition == "train" else 10

        # root_path = "../../../media/deepstorage01/datasets_external/"
        root_path = "data/"

        # Underlying ShapeNet split matching this dataset's partition.
        self.dataset = ShapeNet(
            root=root_path,
            # transform=T.SamplePoints(n_sweeps * n_points),
            # pre_transform=T.Cartesian(),
            split=self.partition)

        self.loader = DataLoader_geo(self.dataset, batch_size=1, shuffle=True)

        # Initialize kwargs
        self.random_transition = kwargs.get("random_transition", False)
        self.acceleration = kwargs.get("acceleration", False)
        self.rotate = kwargs.get("rotate", True)

        # set some needed values:
        # Bool multiplication: sigma is 0.1 when transitions are random, else 0.
        self.sigma_transition = 0.1 * self.random_transition  # defines the variance of the transitions
        # NOTE(review): for the default n_points=512 this clamps to 1.0.
        self.sigma_pc = np.minimum(1.0, np.maximum(
            self.n_points / 10.0, 0.1))  # variance to generate the point cloud
        self.a = 1 + self.acceleration  # defines if speed is constant or if acceleration is constant.

        self.cache = {}
Beispiel #9
0
import torch_geometric.transforms as T
from torch_geometric.datasets import ShapeNet
from torch_geometric.loader import DataLoader
from torch_geometric.nn import MLP, DynamicEdgeConv

# ShapeNet part-segmentation data setup for a DGCNN-style model.
category = 'Airplane'  # Pass in `None` to train on all categories.
path = osp.join(osp.dirname(osp.realpath(__file__)), '..', 'data', 'ShapeNet')
# Access-time augmentation: small jitter plus 15-degree random rotations
# around each of the three axes.
transform = T.Compose([
    T.RandomTranslate(0.01),
    T.RandomRotate(15, axis=0),
    T.RandomRotate(15, axis=1),
    T.RandomRotate(15, axis=2)
])
# Applied once, before the processed dataset is cached on disk.
pre_transform = T.NormalizeScale()
train_dataset = ShapeNet(path, category, split='trainval', transform=transform,
                         pre_transform=pre_transform)
# The test split is deliberately left un-augmented.
test_dataset = ShapeNet(path, category, split='test',
                        pre_transform=pre_transform)
train_loader = DataLoader(train_dataset, batch_size=10, shuffle=True,
                          num_workers=6)
test_loader = DataLoader(test_dataset, batch_size=10, shuffle=False,
                         num_workers=6)


class Net(torch.nn.Module):
    """DGCNN-style backbone built from stacked DynamicEdgeConv layers.

    NOTE(review): this snippet appears truncated — no forward() is visible.
    """
    def __init__(self, out_channels, k=30, aggr='max'):
        super().__init__()

        # Each EdgeConv MLP receives concatenated [x_i, x_j - x_i] features,
        # hence the 2 * C input width. The first layer takes 6 features per
        # point (presumably xyz + normals — confirm against the dataset).
        self.conv1 = DynamicEdgeConv(MLP([2 * 6, 64, 64]), k, aggr)
        self.conv2 = DynamicEdgeConv(MLP([2 * 64, 64, 64]), k, aggr)
        self.conv3 = DynamicEdgeConv(MLP([2 * 64, 64, 64]), k, aggr)
from torch_geometric.datasets import ShapeNet
import torch_geometric.transforms as T

# Load ShapeNet airplanes as raw point clouds.
dataset = ShapeNet(root='/tmp/ShapeNet', categories=['Airplane'])
print(dataset[0])

# convert point cloud dataset into a graph dataset, create a nearest neighbor graph via transforms
dataset = ShapeNet(root='/tmp/ShapeNet', categories=['Airplane'], pre_transform=T.KNNGraph(k=6))
print(dataset[0])

# add random noise to each node
dataset = ShapeNet(root='/tmp/ShapeNet', categories=['Airplane'],
                   pre_transform=T.KNNGraph(k=6), transform=T.RandomTranslate(0.01))
print(dataset[0])
Beispiel #11
0
import torch_geometric.transforms as T
from torch_geometric.datasets import ShapeNet

# Load ShapeNet airplanes as raw point clouds.
dataset = ShapeNet(root='/media/lepoeme20/Data/graphs/ShapeNet',
                   categories=['Airplane'])

print(dataset[0])

# Convert the point cloud dataset into a graph dataset
# by generating nearest neighbor graphs from the point clouds via transforms:
# (pre_transform runs once before caching; transform runs on every access)
transformed_dataset = ShapeNet(root='/media/lepoeme20/Data/graphs/ShapeNet',\
    categories=['Airplane'], pre_transform=T.KNNGraph(k=6), transform=T.RandomTranslate(0.01))

print(transformed_dataset[0])
Beispiel #12
0
from torch_geometric.utils import mean_iou

from pointnet2_classification import MLP

category = 'Airplane'
path = osp.join(osp.dirname(osp.realpath(__file__)), '..', 'data', 'ShapeNet')
# Access-time augmentation: jitter plus small random rotations per axis.
transform = T.Compose([
    T.RandomTranslate(0.01),
    T.RandomRotate(15, axis=0),
    T.RandomRotate(15, axis=1),
    T.RandomRotate(15, axis=2)
])
pre_transform = T.NormalizeScale()
# NOTE(review): this snippet uses the older `train=` ShapeNet API; newer
# torch_geometric versions take split='trainval'/'test' — confirm against
# the installed version.
train_dataset = ShapeNet(path,
                         category,
                         train=True,
                         transform=transform,
                         pre_transform=pre_transform)
test_dataset = ShapeNet(path,
                        category,
                        train=False,
                        pre_transform=pre_transform)
train_loader = DataLoader(train_dataset,
                          batch_size=10,
                          shuffle=True,
                          num_workers=6)
test_loader = DataLoader(test_dataset,
                         batch_size=10,
                         shuffle=False,
                         num_workers=6)

# ================================== DATA TRANSFORMS ===============================================

# Transforms are a common way in torchvision to transform images and perform augmentation. PyTorch
# Geometric comes with its own transforms, which expect a Data object as input and return a new
# transformed Data object. Transforms can be chained together using
# torch_geometric.transforms.Compose and are applied before saving a processed dataset
# (pre_transform) on disk or before accessing a graph in a dataset (transform).

# Let’s look at an example, where we apply transforms on the ShapeNet dataset (containing 17,000
# 3D shape point clouds and per point labels from 16 shape categories).

from torch_geometric.datasets import ShapeNet

# BUG FIX: ShapeNet takes `categories` (a list), not `category` (as used by
# every other snippet in this file), and the printed name was the undefined
# variable `data`.
dataset = ShapeNet(root='/tmp/ShapeNet', categories=['Airplane'])
print(dataset[0])  # Data(pos=[2518, 3], y=[2518])

# We can convert the point cloud dataset into a graph dataset by generating nearest neighbor graphs
# from the point clouds via transforms:

import torch_geometric.transforms as T
from torch_geometric.datasets import ShapeNet

# BUG FIX: T.NNGraph does not exist (every other snippet uses T.KNNGraph);
# ShapeNet takes `categories` (a list); and `data` was undefined here.
dataset = ShapeNet(root='/tmp/ShapeNet', categories=['Airplane'], pre_transform=T.KNNGraph(k=6))
print(dataset[0])  # Data(edge_index=[2, 17768], pos=[2518, 3], y=[2518])

# In addition, we can use the transform argument to randomly augment a Data object,
# e.g. translating each node position by a small number:

import torch_geometric.transforms as T
# Shuffle the dataset by indexing it with a random permutation.
perm = torch.randperm(len(dataset))
dataset = dataset[perm]
dataset = Planetoid(root='./data/Cora', name='Cora')
"""
-----------Mini-batches------------ 
"""
dataset = TUDataset(root='./data/ENZYMES', name='ENZYMES')
loader = DataLoader(dataset, batch_size=32, shuffle=True)
for i, data in enumerate(loader):
    # In the first batch, there are 1047 nodes (belong to the 32 graphs (i.e.,32 batches)), [1047, 3]
    print(data.x.size())
"""
-----------Data Transforms------------ 
"""
# Only datasets
dataset = ShapeNet(root='./data/ShapeNet', categories=['Airplane'])
print(dataset[0])  # >>> Data(pos=[2518, 3], y=[2518])
# Constructing the graph from each point's k nearest neighbors:
dataset = ShapeNet(
    root='./data/ShapeNet',
    categories=['Airplane'],
    pre_transform=T.KNNGraph(
        k=6))  # >>> Data(edge_index=[2, 15108], pos=[2518, 3], y=[2518])
# translate each node position by a small number:
dataset = ShapeNet(root='/tmp/ShapeNet',
                   categories=['Airplane'],
                   pre_transform=T.KNNGraph(k=6),
                   transform=T.RandomTranslate(0.01))
"""
-----------Learning Methods on Graphs---------------
Let's implement a two-layer GCN
Beispiel #15
0
from torch_geometric.datasets import ShapeNet
from torch_geometric.transforms import KNNGraph

# # -------- Load the raw point-cloud data --------
# point_dataset = ShapeNet(root='../datasets/ShapeNet', categories=['Airplane'])
# print(point_dataset)
#
# point_data = point_dataset[0]
# print(point_data)
#
# print('\n')

# -------- Convert to graph data via a k-NN pre_transform --------
graph_dataset = ShapeNet(root='../datasets/ShapeNet',
                         categories=['Airplane'],
                         pre_transform=KNNGraph(k=6))
print(graph_dataset)

graph_data = graph_dataset[0]
print(graph_data)

# Tip: both loading modes cache a `processed` folder under the dataset
# directory, so delete it whenever you switch between the two modes.
def LoadDataset(dataset_train,
                dataset_test,
                pre_transform_train=None,
                transform_train=None,
                pre_transform_test=None,
                transform_test=None,
                category=None):
    """Load a train and a test dataset by name from under DATASET_PATH.

    Supported names: 'ModelNet10', 'ModelNet40', 'PoissonModelNet10',
    'PoissonModelNet40' and 'ShapeNet'. ``category`` is only used for
    ShapeNet.

    Returns:
        (train_dataset, test_dataset), or (None, None) if either name is
        not recognized.
    """
    # BUG FIX: the two paths were swapped — the train path was built from
    # dataset_test and the test path from dataset_train.
    path_train = osp.join(DATASET_PATH, dataset_train)
    path_test = osp.join(DATASET_PATH, dataset_test)

    if not osp.exists(path_train):
        os.makedirs(path_train)

    if not osp.exists(path_test):
        os.makedirs(path_test)

    _train_dataset = None
    if (dataset_train == 'ModelNet10'):
        _train_dataset = ModelNet(path_train, '10', True, transform_train,
                                  pre_transform_train)

    elif (dataset_train == 'ModelNet40'):
        _train_dataset = ModelNet(path_train, '40', True, transform_train,
                                  pre_transform_train)

    elif (dataset_train == 'PoissonModelNet40'):
        _train_dataset = PoissonModelNet(path_train, '40', True)

    elif (dataset_train == 'PoissonModelNet10'):
        _train_dataset = PoissonModelNet(path_train, '10', True)

    elif (dataset_train == 'ShapeNet'):
        _train_dataset = ShapeNet(path_train,
                                  category,
                                  train=True,
                                  transform=transform_train,
                                  pre_transform=pre_transform_train)

    _test_dataset = None
    if (dataset_test == 'ModelNet10'):
        _test_dataset = ModelNet(path_test, '10', False, transform_test,
                                 pre_transform_test)

    elif (dataset_test == 'ModelNet40'):
        _test_dataset = ModelNet(path_test, '40', False, transform_test,
                                 pre_transform_test)

    elif (dataset_test == 'PoissonModelNet40'):
        _test_dataset = PoissonModelNet(path_test, '40', False)

    elif (dataset_test == 'PoissonModelNet10'):
        _test_dataset = PoissonModelNet(path_test, '10', False)

    elif (dataset_test == 'ShapeNet'):
        _test_dataset = ShapeNet(path_test,
                                 category,
                                 train=False,
                                 pre_transform=pre_transform_test)

    if _train_dataset is None or _test_dataset is None:
        print("Invalid dataset requested!")
        return (None, None)
    else:
        print('Dataset Train: {}'.format(dataset_train))
        logging.info('Dataset Train: {}'.format(dataset_train))

        print('Dataset Test: {}'.format(dataset_test))
        logging.info('Dataset Test: {}'.format(dataset_test))

        return (_train_dataset, _test_dataset)