Example #1
0
    def process(self):
        """Read every FaceTalk mesh, label it by category, and save a
        90/10 train/test split to the processed paths."""
        folders = sorted(glob(osp.join(self.raw_dir, 'FaceTalk_*')))
        if not folders:
            # Raw archive not yet unpacked: extract it and rescan.
            extract_zip(self.raw_paths[0], self.raw_dir, log=False)
            folders = sorted(glob(osp.join(self.raw_dir, 'FaceTalk_*')))

        train_samples, test_samples = [], []
        for folder in folders:
            for label, category in enumerate(self.categories):
                ply_files = sorted(glob(osp.join(folder, category, '*.ply')))
                for seq_idx, ply_file in enumerate(ply_files):
                    sample = read_ply(ply_file)
                    sample.y = torch.tensor([label], dtype=torch.long)
                    if self.pre_filter is not None and not self.pre_filter(sample):
                        continue
                    if self.pre_transform is not None:
                        sample = self.pre_transform(sample)

                    # Within each run of 100 frames, the first 90 go to
                    # train and the last 10 to test.
                    bucket = train_samples if seq_idx % 100 < 90 else test_samples
                    bucket.append(sample)

        torch.save(self.collate(train_samples), self.processed_paths[0])
        torch.save(self.collate(test_samples), self.processed_paths[1])
Example #2
0
    def process(self):
        """Load 100 registered meshes, save an 80/20 split, then delete
        the extracted raw folders."""
        extract_zip(self.raw_paths[0], self.raw_dir, log=False)

        mesh_template = osp.join(self.raw_dir, 'meshes', 'tr_reg_{0:03d}.ply')
        samples = []
        for idx in progressbar.progressbar(range(100)):
            sample = read_ply(mesh_template.format(idx))
            if self.pre_filter is not None and not self.pre_filter(sample):
                continue
            if self.pre_transform is not None:
                sample = self.pre_transform(sample)
            samples.append(sample)

        # Meshes 0-79 form the training set, 80-99 the test set.
        torch.save(self.collate(samples[:80]), self.processed_paths[0])
        torch.save(self.collate(samples[80:]), self.processed_paths[1])

        shutil.rmtree(osp.join(self.raw_dir, 'meshes'))
        shutil.rmtree(osp.join(self.raw_dir, 'segs'))
Example #3
0
    def process(self):
        """Process the 100 MPI-FAUST registrations into 80/20 train/test
        files and remove the extracted archive afterwards."""
        extract_zip(self.raw_paths[0], self.raw_dir, log=False)

        template = osp.join(self.raw_dir, 'MPI-FAUST', 'training',
                            'registrations', 'tr_reg_{0:03d}.ply')
        samples = []
        for idx in range(100):
            sample = read_ply(template.format(idx))
            # Label cycles through 0-9 over consecutive registrations.
            sample.y = torch.tensor([idx % 10], dtype=torch.long)
            if self.pre_filter is not None and not self.pre_filter(sample):
                continue
            if self.pre_transform is not None:
                sample = self.pre_transform(sample)
            samples.append(sample)

        torch.save(self.collate(samples[:80]), self.processed_paths[0])
        torch.save(self.collate(samples[80:]), self.processed_paths[1])

        shutil.rmtree(osp.join(self.raw_dir, 'MPI-FAUST'))
Example #4
0
    def process(self):
        """Stream 100 shapes into two processed files: everything up to
        mesh 79 goes into the first, meshes 80-99 into the second."""
        extract_zip(self.raw_paths[0], self.raw_dir, log=False)

        template = osp.join(self.raw_dir, 'shapes', 'tr_reg_{0:03d}.ply')
        pending, out_idx = [], 0
        for idx in progressbar.progressbar(range(100)):
            sample = read_ply(template.format(idx))
            if self.pre_filter is not None and not self.pre_filter(sample):
                continue
            if self.pre_transform is not None:
                sample = self.pre_transform(sample)
            pending.append(sample)
            # Flush at the split boundaries (after mesh 79 and mesh 99).
            if idx in (79, 99):
                torch.save(self.collate(pending),
                           self.processed_paths[out_idx])
                pending = []
                out_idx += 1

        shutil.rmtree(osp.join(self.raw_dir, 'shapes'))
Example #5
0
    def process(self):
        """Read 100 registered meshes plus companion .txt/.mat features,
        then split them into train/test sets by model id.

        Bug fix: the original indexed ``data_list[i]`` by model id when
        building the splits, but ``pre_filter`` can drop meshes, shifting
        every later list position and mislabeling the splits. Data is now
        keyed by model id so filtering cannot misalign the mapping.
        """
        path = osp.join(self.root, model_dir, 'tr_reg_{0:03d}.ply')
        txt_path = osp.join(self.root, data_dir, 'tr_reg_{0:03d}.txt')

        data_by_id = {}
        for i in range(100):
            data = read_ply(path.format(i))
            # Label cycles through 0-9 over consecutive registrations.
            data.y = torch.tensor([i % 10], dtype=torch.long)
            if self.pre_filter is not None and not self.pre_filter(data):
                continue
            if self.pre_transform is not None:
                data = self.pre_transform(data)
            # Per-vertex features from the companion .txt file.
            # NOTE(review): this overwrites any `x` the pre_transform may
            # have set -- confirm that is intended.
            data.x = torch.tensor(np.loadtxt(txt_path.format(i)),
                                  dtype=torch.float32)

            # Extra operators from the matching .mat file (same stem).
            mat = sio.loadmat(txt_path.format(i)[:-3] + 'mat')
            data.V = torch.tensor(mat['V'], dtype=torch.float32)
            data.A = torch.tensor(mat['A'], dtype=torch.float32)
            data.D = torch.tensor(mat['D'], dtype=torch.float32)
            data.clk = torch.tensor(mat['clk'], dtype=torch.float32)

            # Numeric model id parsed from the filename (tr_reg_NNN.ply).
            data.name = int(
                path.format(i).split('/')[-1].split('.')[0].split('_')[-1])
            data_by_id[i] = data

        percentage_valtest = percentage_val + percentage_test
        test_of_valtest = percentage_test / percentage_valtest

        model_ids = list(range(100))
        train_ids, valtest_ids = train_test_split(
            model_ids, test_size=percentage_valtest)
        # The validation split is computed but intentionally not saved,
        # matching the original behavior.
        val_ids, test_ids = train_test_split(
            valtest_ids, test_size=test_of_valtest)

        # Sets give O(1) membership tests; keyed lookup keeps model ids
        # aligned with their Data objects even when some were filtered out.
        train_id_set, test_id_set = set(train_ids), set(test_ids)
        train_list = [data_by_id[i] for i in sorted(data_by_id)
                      if i in train_id_set]
        test_list = [data_by_id[i] for i in sorted(data_by_id)
                     if i in test_id_set]

        torch.save(self.collate(train_list), self.processed_paths[0])
        torch.save(self.collate(test_list), self.processed_paths[1])
from torch_geometric.io import read_obj, read_ply
from quad_mesh_simplify import simplify_mesh
from mayavi import mlab
from time import time

# Load the test mesh and display it before simplification.
mesh = read_ply('./test_data/Lion.ply')
print(mesh)

vertices = mesh.pos
faces = mesh.face
mlab.triangular_mesh(vertices[:, 0], vertices[:, 1], vertices[:, 2],
                     faces.t())
mlab.show()

# Simplify (target of 100 nodes) and time the call.
t0 = time()
simple_pos, simple_face = simplify_mesh(vertices.numpy().astype('double'),
                                        faces.numpy().T.astype('uint32'),
                                        100)
print('needed', time() - t0, 'sec')

# Display the simplified result.
mlab.triangular_mesh(simple_pos[:, 0], simple_pos[:, 1], simple_pos[:, 2],
                     simple_face)
mlab.show()
Example #7
0
    def process_set(self, dataset):
        """Collect and collate every mesh belonging to ``dataset``.

        For each category folder, reads all ``.ply`` meshes, attaches the
        class label from ``self.classmap``, and -- when any
        ``*ifcfeatures_all.csv`` file is present in the raw directory --
        appends per-object global features (12 IFC type flags plus
        log-scaled volume and area, replicated per point) as ``data.x``.

        Fixes over the original:
          * ``DataFrame.append`` (removed in pandas 2.0) -> ``pd.concat``.
          * Bare ``except:`` narrowed to ``KeyError``.
          * The feature-building ``except`` used to print and fall through
            to ``torch.cat`` over undefined tensors (NameError); objects
            with incomplete features are now skipped instead.
          * Removed the dead ``x1`` (compactness) tensor, the duplicated
            ``feat_ind`` assignment, and unused enumerate indices.

        Returns:
            The ``self.collate(...)`` result over the gathered data list.
        """
        categories = self.raw_file_names
        print("The following classes are processed {}".format(categories))

        # check if a feature file exists for the originating ifc file
        # (e.g. volume, area, neighborhood)
        feature_files = glob.glob(
            os.path.join(self.raw_dir, '*ifcfeatures_all.csv'))
        print(feature_files)
        if feature_files:
            print("global features considered")
            glob_feat = True
        else:
            print("no global features found")
            glob_feat = False

        frames = [pd.read_csv(ff, index_col=0) for ff in feature_files]
        all_features = pd.concat(frames) if frames else pd.DataFrame()
        # Keep only the first row per index when files overlap.
        all_features = all_features[~all_features.index.duplicated(
            keep='first')]

        # The 12 binary IFC-type flag columns, in the original x2..x13 order.
        flag_names = ['isWall', 'isStair', 'isSlab', 'isFurn', 'isCol',
                      'isFlowT', 'isFlowS', 'isFlowF', 'isFlowC', 'isDist',
                      'isWin', 'isDoor']

        data_list = []
        path_list = []

        for category in categories:
            folder = osp.join(self.raw_dir, category, dataset)
            paths = glob.glob('{}/*.ply'.format(folder))

            for path in paths:
                path_list.append(path)

                data = read_ply(path)
                # Reverse lookup: class id whose mapped name is `category`.
                label = list(self.classmap.keys())[list(
                    self.classmap.values()).index(category)]
                data.y = torch.tensor([label])

                if glob_feat:
                    # Feature rows are keyed "<category>_<file stem[:22]>".
                    feat_ind = category + "_" + path.split('/')[-1][:22]
                    try:
                        feature = all_features.loc[feat_ind]
                    except KeyError:
                        # No features found, so this object is skipped.
                        continue

                    try:
                        # NOTE(review): assumes 1024 points per mesh --
                        # confirm against the sampling transform upstream.
                        cols = [torch.full((1024, 1), float(feature[name]))
                                for name in flag_names]
                        cols.append(torch.full(
                            (1024, 1), float(np.log(feature['Volume'] + 1))))
                        cols.append(torch.full(
                            (1024, 1), float(np.log(feature['Area'] + 1))))
                    except KeyError:
                        print("not all features found skipping this ob")
                        continue

                    data.x = torch.cat(cols, dim=1)

                    # check consistency
                    assert data.y is not None
                    assert data.x is not None

                data_list.append(data)

        if self.pre_filter is not None:
            data_list = [d for d in data_list if self.pre_filter(d)]

        if self.pre_transform is not None:
            data_list = [self.pre_transform(d) for d in data_list]

        return self.collate(data_list)
Example #8
0
    def process(self):
        """Build the ShapeSeg segmentation dataset from five mesh sources.

        Extracts each source archive (Adobe, FAUST, MIT, SCAPE, SHREC),
        reads the meshes, attaches per-vertex segmentation labels, and
        saves collated results. ``processed_paths[0]`` receives everything
        gathered through SCAPE; ``processed_paths[1]`` receives SHREC.

        NOTE(review): ``data_list`` is never cleared between the first
        four sources, so the intermediate files are cumulative --
        ``faust.pt`` contains Adobe+FAUST, ``mit.pt`` contains
        Adobe+FAUST+MIT, and so on. Confirm this accumulation is
        intentional rather than a missing reset per source.
        """
        print('Extracting zip...')
        extract_zip(self.raw_paths[0], self.raw_dir, log=False)
        shapeseg_path = osp.join(self.raw_dir, 'ShapeSeg')

        data_list = []
        #Adobe
        # Adobe: 41 meshes, one per-mesh label tensor stored as {i}.pt.
        print('Processing Adobe')
        adobe_path = osp.join(shapeseg_path, 'Adobe', 'raw')
        extract_zip(osp.join(adobe_path, 'adobe.zip'), adobe_path)
        adobe_meshes = osp.join(adobe_path, 'meshes')
        adobe_meshes = osp.join(adobe_meshes, '{}.ply')
        adobe_segs = osp.join(adobe_path, 'segs', '{}.pt')
        for i in progressbar.progressbar(range(41)):
            data = read_ply(adobe_meshes.format(i))
            if self.pre_filter is not None and not self.pre_filter(data):
                continue
            if self.pre_transform is not None:
                data = self.pre_transform(data)
            data.y = torch.load(adobe_segs.format(i))
            # If the transform sampled vertices, subsample labels to match.
            if hasattr(data, 'sample_idx'):
                data.y = data.y[data.sample_idx]
            data_list.append(data)
        torch.save(self.collate(data_list), osp.join(shapeseg_path, 'adobe.pt'))

        #FAUST
        # FAUST: 100 registered meshes sharing one segmentation tensor.
        print('Processing FAUST')
        faust_path = osp.join(shapeseg_path, 'FAUST', 'raw')
        extract_zip(osp.join(faust_path, 'faust.zip'), faust_path)
        faust_meshes = osp.join(faust_path, 'meshes')
        faust_meshes = osp.join(faust_meshes, 'tr_reg_{0:03d}.ply')
        faust_segs = torch.load(osp.join(faust_path, 'segs', 'faust_seg.pt'))
        for i in progressbar.progressbar(range(100)):
            data = read_ply(faust_meshes.format(i))
            if self.pre_filter is not None and not self.pre_filter(data):
                continue
            if self.pre_transform is not None:
                data = self.pre_transform(data)
            # NOTE(review): every FAUST mesh shares this same tensor
            # object (no clone) -- fine if labels are read-only downstream.
            data.y = faust_segs
            if hasattr(data, 'sample_idx'):
                data.y = data.y[data.sample_idx]
            data_list.append(data)
        torch.save(self.collate(data_list), osp.join(shapeseg_path, 'faust.pt'))

        #MIT
        # MIT: .obj meshes with per-edge labels (.eseg) that are mapped
        # onto vertices before use.
        print('Processing MIT')
        mit_path = osp.join(shapeseg_path, 'MIT', 'raw')
        extract_zip(osp.join(mit_path, 'mit.zip'), mit_path)
        mit_meshes = osp.join(mit_path, 'meshes')
        mit_seg = osp.join(mit_path, 'segs')
        # NOTE(review): `osls` is presumably an alias for os.listdir
        # imported elsewhere in this module -- confirm.
        for filename in progressbar.progressbar(osls(mit_meshes)):
            data = read_obj(osp.join(mit_meshes, filename))
            seg_path = osp.join(mit_seg, filename.replace('.obj', '.eseg'))
            segs = torch.from_numpy(np.loadtxt(seg_path)).long()
            data.y = edge_to_vertex_labels(data.face, segs, data.num_nodes)
            if self.pre_filter is not None and not self.pre_filter(data):
                continue
            if self.pre_transform is not None:
                data = self.pre_transform(data)
            data_list.append(data)
        torch.save(self.collate(data_list), osp.join(shapeseg_path, 'mit.pt'))

        #SCAPE
        # SCAPE: 71 meshes sharing one segmentation tensor.
        print('Processing SCAPE')
        scape_path = osp.join(shapeseg_path, 'SCAPE', 'raw')
        extract_zip(osp.join(scape_path, 'scape.zip'), scape_path)
        scape_meshes = osp.join(scape_path, 'meshes')
        scape_meshes = osp.join(scape_meshes, '{}.ply')
        scape_segs = torch.load(osp.join(scape_path, 'segs', 'scape_seg.pt'))
        for i in progressbar.progressbar(range(71)):
            data = read_ply(scape_meshes.format(i))
            if self.pre_filter is not None and not self.pre_filter(data):
                continue
            if self.pre_transform is not None:
                data = self.pre_transform(data)
            data.y = scape_segs
            if hasattr(data, 'sample_idx'):
                data.y = data.y[data.sample_idx]
            data_list.append(data)
        torch.save(self.collate(data_list), osp.join(shapeseg_path, 'scape.pt'))

        # Everything accumulated so far becomes the first processed file;
        # the list is reset only here, before SHREC.
        torch.save(self.collate(data_list), self.processed_paths[0])
        data_list = []

        #SHREC
        # SHREC: 18 meshes, one label tensor per mesh; saved separately
        # as the second processed file.
        print('Processing SHREC')
        shrec_path = osp.join(shapeseg_path, 'SHREC', 'raw')
        extract_zip(osp.join(shrec_path, 'shrec.zip'), shrec_path)
        shrec_meshes = osp.join(shrec_path, 'meshes')
        shrec_meshes = osp.join(shrec_meshes, '{}.ply')
        shrec_segs = osp.join(shrec_path, 'segs', '{}.pt')
        for i in progressbar.progressbar(range(18)):
            data = read_ply(shrec_meshes.format(i))
            if self.pre_filter is not None and not self.pre_filter(data):
                continue
            if self.pre_transform is not None:
                data = self.pre_transform(data)
            data.y = torch.load(shrec_segs.format(i))
            if hasattr(data, 'sample_idx'):
                data.y = data.y[data.sample_idx]
            data_list.append(data)

        torch.save(self.collate(data_list), self.processed_paths[1])


        # Remove the extracted tree now that processed files are written.
        shutil.rmtree(osp.join(self.raw_dir, 'ShapeSeg'))