コード例 #1
0
ファイル: experiments.py プロジェクト: fclairec/geometric-ifc
def transform_setup(graph_u=False,
                    graph_gcn=False,
                    rotation=180,
                    samplePoints=1024,
                    mesh=False,
                    node_translation=0.01):
    """Build the (transform, pretransform) pipelines for the experiments.

    Parameters
    ----------
    graph_u : int or bool
        Truthy value is used as ``k`` for a k-NN graph (graph U-Net setup).
    graph_gcn : int or bool
        Truthy value is used as ``k`` for a k-NN graph (GCN setup).
    rotation : sequence
        Forwarded to ``T.RandomRotate(rotation[0], rotation[1])``.
        NOTE(review): the default ``180`` is a plain int and would raise
        ``TypeError`` on ``rotation[0]`` — callers appear to always pass a
        two-element sequence; confirm before relying on the default.
    samplePoints : int
        Number of points sampled from each mesh.
    mesh : bool or str
        If truthy, keep mesh faces and derive edges from them.
    node_translation : float
        Unused; kept for backward compatibility of the signature.

    Returns
    -------
    tuple
        ``(transform, pretransform)`` composed transforms.
    """
    # Defensive defaults: the trailing ``else`` below is unreachable (the
    # first branch already covers "both falsy"), but initializing here
    # guarantees the final return can never raise NameError.
    transform, pretransform = None, None
    if not graph_u and not graph_gcn:
        # Default transformation for scale normalization, centering,
        # point sampling and rotating.
        pretransform = T.Compose([T.NormalizeScale(), T.Center()])
        transform = T.Compose([
            T.SamplePoints(samplePoints),
            T.RandomRotate(rotation[0], rotation[1])
        ])
        print("pointnet rotation {}".format(rotation))
    elif graph_u:
        pretransform = T.Compose([T.NormalizeScale(), T.Center()])
        transform = T.Compose([
            T.NormalizeScale(),
            T.Center(),
            T.SamplePoints(samplePoints, True, True),
            T.RandomRotate(rotation[0], rotation[1]),
            T.KNNGraph(k=graph_u)
        ])
    elif graph_gcn:
        pretransform = T.Compose([T.NormalizeScale(), T.Center()])

        if mesh:
            # The original code had identical pipelines for
            # mesh == "extraFeatures" and any other truthy mesh value,
            # so the duplicated branches are merged here.
            transform = T.Compose([
                T.RandomRotate(rotation[0], rotation[1]),
                T.GenerateMeshNormals(),
                T.FaceToEdge(True),
                T.Distance(norm=True),
                T.TargetIndegree(cat=True)
            ])
        else:
            transform = T.Compose([
                T.SamplePoints(samplePoints, True, True),
                T.KNNGraph(k=graph_gcn),
                T.Distance(norm=True)
            ])
            print("no mesh")
        print("Rotation {}".format(rotation))
        print("Meshing {}".format(mesh))
    else:
        # Unreachable given the conditions above; kept for clarity.
        print('no transfom')

    return transform, pretransform
コード例 #2
0
    def test_data_transforms(self):
        """Smoke-test basic torch_geometric transforms on ShapeNet.

    Reproduce manually with:
    export LD_LIBRARY_PATH=/usr/local/cuda-10.0/lib64:/usr/local/cudnn-10.0-v7.6.5.32
    proxychains python -c "from template_lib.examples.DGL.geometric.test_pytorch_geometric import TestingGeometric;\
      TestingGeometric().test_data_transforms()"

    """
        # Default environment variables expected by the experiment harness.
        if 'CUDA_VISIBLE_DEVICES' not in os.environ:
            os.environ['CUDA_VISIBLE_DEVICES'] = '0'
        if 'PORT' not in os.environ:
            os.environ['PORT'] = '6006'
        if 'TIME_STR' not in os.environ:
            os.environ['TIME_STR'] = '0' if utils.is_debugging() else '1'
        # Derive the output directory from the test method name; the
        # harness convention is that method names start with 'test_'.
        assert sys._getframe().f_code.co_name.startswith('test_')
        command = sys._getframe().f_code.co_name[5:]
        class_name = self.__class__.__name__[7:] \
          if self.__class__.__name__.startswith('Testing') \
          else self.__class__.__name__
        outdir = f'results/{class_name}/{command}'

        from datetime import datetime
        # Optionally suffix the output directory with a timestamp
        # (controlled by the TIME_STR environment variable).
        TIME_STR = bool(int(os.getenv('TIME_STR', 0)))
        time_str = datetime.now().strftime("%Y%m%d-%H_%M_%S_%f")[:-3]
        outdir = outdir if not TIME_STR else (outdir + '_' + time_str)
        print(outdir)

        import collections, shutil
        # Start from a clean output directory every run.
        shutil.rmtree(outdir, ignore_errors=True)
        os.makedirs(outdir, exist_ok=True)

        import torch_geometric.transforms as T
        from torch_geometric.datasets import ShapeNet
        from template_lib.d2.data.build_points_toy import plot_points

        # Raw point cloud: visualize positions and per-point features.
        # NOTE(review): this downloads ShapeNet on first use.
        dataset = ShapeNet(root='datasets/shapenet', categories=['Airplane'])
        idx = -1
        plot_points(dataset[idx].pos)
        plot_points(dataset[idx].x)

        # Same dataset with k-NN (k=6) connectivity built once at
        # preprocessing time.
        dataset = ShapeNet(root='datasets/shapenet',
                           categories=['Airplane'],
                           pre_transform=T.KNNGraph(k=6))

        # And additionally jitter node positions at access time.
        dataset = ShapeNet(root='datasets/shapenet',
                           categories=['Airplane'],
                           pre_transform=T.KNNGraph(k=6),
                           transform=T.RandomTranslate(0.01))
        pass
コード例 #3
0
    def process(self):
        """Build and persist the PyG dataset from the background CSV files.

        Reads per-event sequential (pulse) and scalar (truth) tables,
        assembles one ``Data`` object per event with k-NN edges, applies
        the optional pre-filter / pre-transform, and saves the collated
        result to ``self.processed_paths[0]``.
        """
        seq_b = pd.read_csv('data/sequential_background.csv')
        scalar_b = pd.read_csv('data/scalar_background.csv')

        data_list = []
        for i, (index, sca) in enumerate(scalar_b.iterrows()):
            # All pulses belonging to this event.
            tmp_event = seq_b.loc[seq_b['event_no'] == sca['event_no']]
            x = torch.tensor(tmp_event[['dom_charge','dom_time','dom_x','dom_y','dom_z']].to_numpy(),dtype=torch.float) #Features
            pos = torch.tensor(tmp_event[['dom_x','dom_y','dom_z']].to_numpy(),dtype=torch.float) #Position
            y = torch.tensor(sca[sca.keys()[2:]].to_numpy(),dtype=torch.float) #Target
            dat = Data(x=x,edge_index=None,edge_attr=None,y=y,pos=pos)
            # Define edges by k-NN (default k=6).  Assign the result back:
            # relying on the transform mutating `dat` in place is fragile
            # across torch_geometric versions.
            dat = T.KNNGraph(loop=True)(dat)
            data_list.append(dat)
            if i % 1000 == 0:
                print(i)  # coarse progress indicator

        if self.pre_filter is not None:
            data_list = [data for data in data_list if self.pre_filter(data)]

        if self.pre_transform is not None:
            data_list = [self.pre_transform(data) for data in data_list]

        data, slices = self.collate(data_list)
        torch.save((data,slices), self.processed_paths[0])

# MyOwnDataset(root = 'C:/Users/jv97/Desktop/github/Neutrino-Machine-Learning/dataset_background')

#Could be done without all this, but the collate method is nessecary and torch.save
コード例 #4
0
 def __init__(self,
              descriptor_dim,
              sampler=None,
              split='train',
              transform=DefaultTransform,
              build_graph=False,
              cls=False):
     """Evaluation-only FAUST scan dataset producing 5k-point dense graphs.

     Raises:
         RuntimeError: if ``split == 'train'`` — this dataset is test only.
     """
     super(FaustFEPts5k, self).__init__()
     self.name = 'FaustFEPts5k'
     self.IDlist = np.arange(10000)
     self.split = split
     # Only evaluation splits are supported.
     if split == 'train':
         raise RuntimeError("This dataset is Test Only")
     if split == 'val':
         self.IDlist = self.IDlist[:40]
     # ('test' keeps the full ID list.)
     self.file_path = '{}/faust/scans/{{0:03d}}_{{0:03d}}.mat'.format(
         PATH_TO_DATA)
     self.template_feats = helper.loadSMPLDescriptors()[:, :descriptor_dim]
     self.template_points = helper.loadSMPLModels()[0].verts
     self.pre_transform = None  # T.NormalizeScale() was considered here
     self.cls = cls
     # Pipeline: user transform, optional k-NN graph, then densify to 5000.
     steps = [transform]
     if build_graph:
         steps.append(T.KNNGraph(k=6))
     steps.append(T.ToDense(5000))
     self.transform = T.Compose(steps)
コード例 #5
0
 def __init__(self,
              descriptor_dim,
              sampler=None,
              split='train',
              transform=DefaultTransform,
              cls=False,
              build_graph=False):
     """SURREAL scan dataset producing 5k-point dense graphs."""
     super(SurrealFEPts5k, self).__init__()
     self.name = 'SurrealFEPts5k'
     self.split = split
     # Carve train/test/val out of the module-level IDlist.
     holdout = num_test * num_views
     if split == 'train':
         self.IDlist = IDlist[:, :-holdout].reshape(-1)
     elif split == 'test':
         self.IDlist = IDlist[:, -holdout:].reshape(-1)
     elif split == 'val':
         self.IDlist = IDlist[:, :num_views].reshape(-1)
     self.file_path = '{}/scans/{{0:06d}}/{{1:03d}}.mat'.format(
         PATH_TO_SURREAL)
     self.template_feats = helper.loadSMPLDescriptors()[:, :descriptor_dim]
     self.template_points = helper.loadSMPLModels()[0].verts
     self.cls = cls
     # Pipeline: user transform, optional k-NN graph, then densify to 5000.
     steps = [transform]
     if build_graph:
         steps.append(T.KNNGraph(k=6))
     steps.append(T.ToDense(5000))
     self.transform = T.Compose(steps)
コード例 #6
0
ファイル: graph_reg.py プロジェクト: imatge-upc/munegc
    def __init__(self,
                 n_neigh=9,
                 rad_neigh=0.1,
                 knn=None,
                 self_loop=True,
                 edge_attr=None,
                 flow='source_to_target'):
        """Graph regeneration transform: rebuild connectivity and edge attrs.

        Args:
            n_neigh: number of neighbours (``k`` for the k-NN graph, or the
                neighbour cap for the radius graph).
            rad_neigh: radius used by the radius graph.
            knn: tri-state switch — ``True`` builds a k-NN graph, ``False``
                builds a radius graph, ``None`` leaves connectivity untouched.
            self_loop: whether generated graphs keep self loops.
            edge_attr: ``None``, a '-'-separated string, or a list of
                attribute names ('poscart', 'posspherical',
                'featureoffsets', 'featurel2') appended as edge attributes.
            flow: message-passing flow direction forwarded to the graph
                constructors.

        Raises:
            RuntimeError: if an unknown edge-attribute name is given.
        """
        super(GraphReg, self).__init__()
        graph_transform_list = []
        self.del_edge_attr = False
        self.knn = knn
        self.n_neigh = n_neigh
        self.rad_neigh = rad_neigh
        self.self_loop = self_loop
        self.edge_attr = edge_attr
        # Deliberate `== True` / `== False`: `knn` is tri-state and `None`
        # must fall through to the "do not regenerate" branch.
        if self.knn == True:
            graph_transform_list.append(
                T.KNNGraph(n_neigh, loop=self_loop, flow=flow))
        elif self.knn == False:
            graph_transform_list.append(
                T.RadiusGraph(self.rad_neigh,
                              loop=self_loop,
                              max_num_neighbors=n_neigh,
                              flow=flow))
        else:
            print("Connectivity of the graph will not be re-generated")

        # Edge attributes: normalize a '-'-separated string into a list.
        if edge_attr is not None:
            self.del_edge_attr = True
            if isinstance(edge_attr, str):
                if edge_attr:
                    edge_attr = [attr.strip() for attr in edge_attr.split('-')]
                else:
                    edge_attr = []
            for attr in edge_attr:
                attr = attr.strip().lower()

                if attr == 'poscart':
                    graph_transform_list.append(Cartesian(norm=False,
                                                          cat=True))

                elif attr == 'posspherical':
                    graph_transform_list.append(Spherical(cat=True))

                elif attr == 'featureoffsets':
                    graph_transform_list.append(
                        FeatureDistances(metric='offset', cat=True))

                elif attr == 'featurel2':
                    graph_transform_list.append(
                        FeatureDistances(metric='l2', cat=True))

                else:
                    raise RuntimeError('{} is not supported'.format(attr))
        self.graph_transform = T.Compose(graph_transform_list)
コード例 #7
0
def save_graphs(feat_dir, images_list, graph_files_dir):
    """For each image, build a k-NN graph (k=6, undirected) over its
    normalized ROI positions and save the resulting ``Data`` object under
    the same file name in ``graph_files_dir``.
    """
    constructor = T.KNNGraph(k=6, force_undirected=True)
    # Iterate names directly instead of range(len(...)); progressbar
    # accepts any sized iterable.
    for name in progressbar.progressbar(images_list):
        dict_feats = torch.load(os.path.join(feat_dir, name))
        data = Data(pos=dict_feats['norm_rois'])
        data = constructor(data)
        torch.save(data, os.path.join(graph_files_dir, name))
コード例 #8
0
def constr_graph(feat_dir, images_list, graph_files_dir):
    """Return a list of k-NN graphs (k=6, undirected) built from the pooled
    features and normalized ROI positions stored per image in ``feat_dir``.

    ``graph_files_dir`` is unused; kept for interface compatibility with
    ``save_graphs``.
    """
    constructor = T.KNNGraph(k=6, force_undirected=True)
    res = []
    # Iterate names directly instead of range(len(...)).
    for name in progressbar.progressbar(images_list):
        dict_feats = torch.load(os.path.join(feat_dir, name))
        data = Data(x=dict_feats['pooled_feat'], pos=dict_feats['norm_rois'])
        res.append(constructor(data))
    return res
def main():
    """Train and evaluate SyntheticGNN on the synthetic point-cloud task."""
    # ---- command-line arguments ----
    arg_parser = ArgumentParser()
    arg_parser.add_argument("--batch_size", default=64, type=int)
    arg_parser.add_argument("--num_workers", default=2, type=int)
    arg_parser.add_argument("--task", default="off_center", type=str)
    arg_parser = pl.Trainer.add_argparse_args(arg_parser)
    args = arg_parser.parse_args()

    # ---- data module ----
    data_dir = Path(gvp.__file__).parents[1] / "data/synthetic"
    # k-NN (k=10) connectivity followed by extended point-pair features.
    transform = transforms.Compose([transforms.KNNGraph(k=10), ExtendedPPF()])
    dm = SyntheticDataModule(
        data_dir, args.batch_size, args.task, transform, num_workers=args.num_workers
    )

    # ---- model ----
    model = SyntheticGNN(4, 32, 4, 32)

    # ---- logging and checkpointing ----
    wandb_logger = WandbLogger(
        name=f"SyntheticGNN-{args.task}", project="GVP", reinit=True
    )
    # Keep the three best checkpoints by validation loss.
    checkpoint_callback = ModelCheckpoint(
        monitor="val_loss",
        dirpath="model_checkpoints",
        filename=f"SyntheticGNN-{args.task}-" + "{epoch:02d}-{val_loss:.2f}",
        save_weights_only=True,
        save_top_k=3,
        mode="min",
    )

    # ---- training ----
    trainer = pl.Trainer.from_argparse_args(
        args,
        logger=wandb_logger,
        callbacks=[checkpoint_callback],
    )
    trainer.fit(model, dm)

    # ---- testing ----
    result = trainer.test(datamodule=dm)
    print(result)

    wandb.finish()
コード例 #10
0
 def __init__(self, root_path, transform=None, pre_transform=None):
     """Table-structure dataset; prepares a k-NN transform over cell positions."""
     super(GFTE_POS_DATASET, self).__init__(root_path, transform,
                                            pre_transform)
     self.root_path = root_path
     self.json_file_list = os.listdir(os.path.join(root_path, "structure"))
     # Data cleaning: starting from the json files, use the json file name
     # to check whether a matching img file exists, then compare json
     # against chunk data to validate each sample (json and chunk must match).
     self.imglist = os.listdir(os.path.join(root_path, "testtrain"))  # img files
     # Replace with the list that survives check_all() validation.
     self.imglist = self.check_all()
     self.graph_transform = GT.KNNGraph(
         k=6
     )  # Creates a k-NN graph (k=6) based on node positions `pos`.
コード例 #11
0
ファイル: dataset1.py プロジェクト: zoujuny/GFTE
 def __init__(self, root_path, transform=None, pre_transform=None):
     """SciTSR dataset; caches the validated image list in imglist.json."""
     super(ScitsrDataset, self).__init__(root_path, transform,
                                         pre_transform)
     self.root_path = root_path
     self.jsonfile = os.path.join(self.root_path, "imglist.json")
     # imglist.json (when present) already excludes questionable files.
     if os.path.exists(self.jsonfile):
         with open(self.jsonfile, "r") as read_file:
             self.imglist = json.load(read_file)
     else:
         # str.endswith accepts a tuple of suffixes, so one call covers
         # both extensions (case-insensitive via lower()).
         self.imglist = [
             fn for fn in os.listdir(os.path.join(self.root_path, "img"))
             if fn.lower().endswith(('.jpg', '.png'))
         ]
         self.imglist = self.check_all()
         with open(self.jsonfile, "w") as write_file:
             json.dump(self.imglist, write_file)
     # Creates a k-NN graph (k=6) based on node positions `pos`.
     self.graph_transform = GT.KNNGraph(k=6)
コード例 #12
0
ファイル: dataset2.py プロジェクト: rgoparaju/GFTE
 def __init__(self, root_path, transform=None, pre_transform=None):
     """SciTSR dataset; caches the validated image list in imglist.json."""
     super(ScitsrDataset, self).__init__(root_path, transform,
                                         pre_transform)
     self.root_path = root_path
     self.jsonfile = os.path.join(self.root_path, "imglist.json")
     self.img_size = 256
     self.kernel = np.ones((3, 3), np.uint8)  # used to thicken image lines a bit
     # imglist.json (when present) already excludes questionable files.
     if os.path.exists(self.jsonfile):
         with open(self.jsonfile, "r") as read_file:
             self.imglist = json.load(read_file)
     else:
         # NOTE(review): os.path.join discards all prior components when a
         # later argument is absolute, so this always lists
         # "/content/scitsr_data/train/img" regardless of root_path —
         # confirm that is intended.
         self.imglist = [
             fn for fn in os.listdir(
                 os.path.join(self.root_path,
                              "/content/scitsr_data/train/img"))
             if fn.lower().endswith(('.jpg', '.png'))
         ]
         self.imglist = self.check_all()
         with open(self.jsonfile, "w") as write_file:
             json.dump(self.imglist, write_file)
     # Creates a k-NN graph (k=6) based on node positions `pos`.
     self.graph_transform = GT.KNNGraph(k=6)
コード例 #13
0
ファイル: datasetloader.py プロジェクト: Mkarami3/GNN_FEM
    def load(config):
        '''
        Load all .vtk simulation files under config.data_path into a list
        of PyG Data objects with k-NN (k=6) connectivity.

        input: config exposing data_path, json_force and json_disp
        return: list of Data(x=normalized force, y=normalized displacement,
                pos=node coordinates)
        '''
        FORCE_MEAN, FORCE_STD = DatasetLoader.dic_to_np(config.json_force)
        DISP_MEAN, DISP_STD = DatasetLoader.dic_to_np(config.json_disp)

        # Bug fix: the original printed FORCE_STD under the "FORCE_MEAN" label.
        print("FORCE_MEAN", FORCE_MEAN)
        print("DISP_STD", DISP_STD)

        dataset = []
        knn = T.KNNGraph(k=6)
        print('[INFO] Loading dataset ...')
        for file_path in sorted(os.listdir(config.data_path)):

            # Only consider VTK simulation outputs.
            if file_path.endswith('.vtk'):

                full_path = os.path.join(config.data_path, file_path)

                mesh_pv = pv.read(full_path)
                force = mesh_pv.point_arrays['externalForce']
                disp = mesh_pv.point_arrays['computedDispl']

                # Normalize with the precomputed dataset statistics.
                force_norm = (force - FORCE_MEAN) / FORCE_STD
                disp_norm = (disp - DISP_MEAN) / DISP_STD

                point_torch = torch.from_numpy(mesh_pv.points)
                disp_torch = torch.from_numpy(disp_norm)  #labels
                force_torch = torch.from_numpy(force_norm)  #node features

                data = Data(x=force_torch, y=disp_torch, pos=point_torch)

                data = knn(data)
                dataset.append(data)

        return dataset
コード例 #14
0
        # (fragment — enclosing function starts above this view)
        # This branch assembles features from pre-built position columns;
        # it must produce the same column layout as the else-branch below.
        x = torch.cat([torch.tensor(tmp_event[['charge_log10','time']].values,dtype=torch.float),x_pos,y_pos,z_pos],dim=1)
        pos = torch.cat([x_pos,y_pos,z_pos],dim=1)
    else:
        # Raw DOM features/positions straight from the event dataframe.
        x = torch.tensor(tmp_event[['charge_log10','time','dom_x','dom_y','dom_z']].values,dtype=torch.float) #Features
        pos = torch.tensor(tmp_event[['dom_x','dom_y','dom_z']].values,dtype=torch.float) #Position

    # Ground-truth targets for this event from the SQLite `truth` table.
    query = "SELECT energy_log10, time, position_x, position_y, position_z, direction_x, direction_y, direction_z, azimuth, zenith FROM truth WHERE event_no = {}".format(event_no)
    y = pd.read_sql(query,con)

    y = torch.tensor(y.values,dtype=torch.float) #Target

    dat = Data(x=x,edge_index=None,edge_attr=None,y=y,pos=pos) 
    
#     T.KNNGraph(loop=True)(dat) #defining edges by k-NN with k=6 !!! Make sure .pos is not scaled!!! ie. x,y,z  -!-> ax,by,cz
    
    # Build directed k-NN edges (k=6), then symmetrize and add self loops.
    T.KNNGraph(k=6, loop=False, force_undirected = False)(dat)
    dat.adj_t = None
    T.ToUndirected()(dat)
    T.AddSelfLoops()(dat)
    # Reverse every edge (stack col over row) — presumably to match the
    # model's message-passing flow convention; confirm against the model.
    (row, col) = dat.edge_index
    dat.edge_index = torch.stack([col,row],dim=0)
    
    data_list.append(dat)

    # Flush every `subdivides` events into a numbered shard on disk.
    if (i+1) % subdivides == 0:
        data, slices = InMemoryDataset.collate(data_list)
        torch.save((data,slices), destination + '/{}k_{}{}.pt'.format(subdivides//1000,save_filename,subset))
        subset += 1
        data_list = [] #Does this free up the memory?
    
    if i % 500 == 0:
コード例 #15
0
        # (fragment — enclosing method starts above this view)
        # Apply the user transform to both the source and target graphs.
        if self.transform is not None:
            data_s = self.transform(data_s)
            data_t = self.transform(data_t)

        # Merge the pair into one Data object with `_s` / `_t` suffixed keys
        # — presumably so paired graphs can be batched with follow_batch;
        # confirm against the DataLoader setup.
        data = Data(num_nodes=pos_s.size(0))
        for key in data_s.keys:
            data['{}_s'.format(key)] = data_s[key]
        for key in data_t.keys:
            data['{}_t'.format(key)] = data_t[key]

        return data


# Shared transform: constant node features, k-NN (k=8) connectivity and
# Cartesian relative edge attributes.
transform = T.Compose([
    T.Constant(),
    T.KNNGraph(k=8),
    T.Cartesian(),
])
train_dataset = RandomGraphDataset(30, 60, 0, 20, transform=transform)
# follow_batch keeps separate batch vectors for the source/target node sets.
train_loader = DataLoader(train_dataset, args.batch_size, shuffle=True,
                          follow_batch=['x_s', 'x_t'])

path = osp.join('..', 'data', 'PascalPF')
test_datasets = [PascalPF(path, cat, transform) for cat in PascalPF.categories]

device = 'cuda' if torch.cuda.is_available() else 'cpu'
# psi_1 consumes the constant (1-dim) node features; psi_2 operates on
# rnd_dim-dimensional random features.
psi_1 = SplineCNN(1, args.dim, 2, args.num_layers, cat=False, dropout=0.0)
psi_2 = SplineCNN(args.rnd_dim, args.rnd_dim, 2, args.num_layers, cat=True,
                  dropout=0.0)
model = DGMC_modified_v2(psi_1, psi_2, num_steps=args.num_steps).to(device)
optimizer = torch.optim.Adam(model.parameters(), lr=args.lr)
コード例 #16
0
 def getKNNConstructor(k=6, force_undirected=True):
     """Return a k-NN graph transform for the given neighbour count.

     NOTE(review): defined without ``self``/``cls`` — appears intended as a
     static factory; confirm it is called on the class, not an instance.
     """
     knn_transform = T.KNNGraph(k=k, force_undirected=force_undirected)
     return knn_transform
コード例 #17
0
import torch_geometric.transforms as T
from torch_geometric.datasets import ShapeNet

# Plain point-cloud dataset: samples carry positions and labels only.
dataset = ShapeNet(root='/media/lepoeme20/Data/graphs/ShapeNet',
                   categories=['Airplane'])

print(dataset[0])

# Convert the point cloud dataset into a graph dataset
# by generating nearest neighbor graphs from the point clouds via transforms;
# RandomTranslate additionally jitters node positions at access time.
transformed_dataset = ShapeNet(root='/media/lepoeme20/Data/graphs/ShapeNet',\
    categories=['Airplane'], pre_transform=T.KNNGraph(k=6), transform=T.RandomTranslate(0.01))

print(transformed_dataset[0])
コード例 #18
0
# Batched loading of a multi-graph dataset.
dataset = TUDataset(root='./data/ENZYMES', name='ENZYMES')
loader = DataLoader(dataset, batch_size=32, shuffle=True)
for i, data in enumerate(loader):
    # In the first batch, there are 1047 nodes (belong to the 32 graphs (i.e.,32 batches)), [1047, 3]
    print(data.x.size())
"""
-----------Data Transforms------------ 
"""
# Only datasets
dataset = ShapeNet(root='./data/ShapeNet', categories=['Airplane'])
print(dataset[0])  # >>> Data(pos=[2518, 3], y=[2518])
# Constructing the Graph with neighbors (k-NN, k=6)
dataset = ShapeNet(
    root='./data/ShapeNet',
    categories=['Airplane'],
    pre_transform=T.KNNGraph(
        k=6))  # >>> Data(edge_index=[2, 15108], pos=[2518, 3], y=[2518])
# translate each node position by a small number:
dataset = ShapeNet(root='/tmp/ShapeNet',
                   categories=['Airplane'],
                   pre_transform=T.KNNGraph(k=6),
                   transform=T.RandomTranslate(0.01))
"""
-----------Learning Methods on Graphs---------------
Let's implement a two-layer GCN
"""
# Load the datasets: a specific datasets format
dataset = Planetoid(root='./data/Cora', name='Cora')  # Only one Graph

# Useful dataset attributes: dataset.num_classes, dataset.num_node_features, dataset.num_edge_features, dataset.num_features

コード例 #19
0
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Demo: turning the ShapeNet point-cloud dataset into graph datasets
with torch_geometric transforms.

Created on Wed Nov 27 09:35:12 2019

@author: dipu
"""

from torch_geometric.datasets import ShapeNet

# Plain point-cloud dataset (positions and labels only).
dataset = ShapeNet(root='/tmp/ShapeNet', categories=['Airplane'])

print(dataset[0])

import torch_geometric.transforms as T
from torch_geometric.datasets import ShapeNet

# Same dataset with k-NN (k=6) connectivity built once at preprocessing.
dataset = ShapeNet(root='/tmp/ShapeNet',
                   categories=['Airplane'],
                   pre_transform=T.KNNGraph(k=6))

data = dataset[0]

# Additionally jitter node positions by a small random translation
# each time a sample is accessed.
dataset = ShapeNet(root='/tmp/ShapeNet',
                   categories=['Airplane'],
                   pre_transform=T.KNNGraph(k=6),
                   transform=T.RandomTranslate(0.01))

data = dataset[0]
コード例 #20
0
from torch_geometric.datasets import ShapeNet
import torch_geometric.transforms as T

# Plain point-cloud dataset (positions and labels only).
dataset = ShapeNet(root='/tmp/ShapeNet', categories=['Airplane'])
print(dataset[0])

# convert point cloud dataset into a graph dataset, create a nearest neighbor graph via transforms
dataset = ShapeNet(root='/tmp/ShapeNet', categories=['Airplane'], pre_transform=T.KNNGraph(k=6))
print(dataset[0])

# add random noise to each node (small random translation at access time)
dataset = ShapeNet(root='/tmp/ShapeNet', categories=['Airplane'],
                   pre_transform=T.KNNGraph(k=6), transform=T.RandomTranslate(0.01))
print(dataset[0])
コード例 #21
0
 def __init__(self, pos_list: list):
     """Wrap a list of node-position entries for prediction-time use.

     A k-NN (k=6) graph transform is prepared so connectivity can be
     built from each entry's `pos` attribute.
     """
     super(PRED_DATASET, self).__init__()
     self.pos_list = pos_list
     self.graph_transform = GT.KNNGraph(k=6)